diff --git a/.asf.yaml b/.asf.yaml index 0bff102a9c951..16d7a0e68e3bb 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -32,6 +32,8 @@ github: issues: true # Enable projects for project management boards projects: true + # Keep the wiki disabled - documentation is maintained in the repository instead + wiki: false enabled_merge_buttons: squash: true diff --git a/.coveragerc b/.coveragerc index 83805150083e5..7d4898e37189d 100644 --- a/.coveragerc +++ b/.coveragerc @@ -24,3 +24,6 @@ omit = airflow/migrations/* airflow/www/node_modules/** airflow/www_rbac/node_modules/** + +[run] +relative_files = True diff --git a/.github/workflows/cancel_other_workflow_runs.yml b/.github/workflows/cancel_other_workflow_runs.yml new file mode 100644 index 0000000000000..54319180d936d --- /dev/null +++ b/.github/workflows/cancel_other_workflow_runs.yml @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +--- +name: Cancel other workflow runs +on: + schedule: + - cron: '*/5 * * * *' +jobs: + cancel-other-workflow-runs: + if: github.repository == 'apache/airflow' + timeout-minutes: 10 + name: "Cancel other workflow runs" + runs-on: ubuntu-latest + steps: + - uses: potiuk/cancel-workflow-runs@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + workflow: ci.yml + failFastJobNames: > + ["^Static checks.*", "^Build docs$", "^Backport packages$", + "^Checks: Helm tests$", "^Build prod image .*", "^Test OpenAPI.*"] diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8626d4c223975..6e2c47b1b9c0e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,6 +28,7 @@ on: env: MOUNT_LOCAL_SOURCES: "false" + MOUNT_FILES: "true" FORCE_ANSWER_TO_QUESTIONS: "yes" SKIP_CHECK_REMOTE_IMAGE: "true" SKIP_CI_IMAGE_CHECK: "true" @@ -47,36 +48,28 @@ jobs: name: "Cancel previous workflow run" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Get ci workflow id - run: "scripts/ci/cancel/get_workflow_id.sh" - env: - WORKFLOW: ci - GITHUB_TOKEN: ${{ github.token }} - GITHUB_REPOSITORY: ${{ github.repositoru }} - - name: Cancel workflow ${{ github.workflow }} - uses: styfle/cancel-workflow-action@0.3.2 - with: - workflow_id: ${{ env.WORKFLOW_ID }} - access_token: ${{ github.token }} + - uses: potiuk/cancel-workflow-runs@v1 + with: + workflow: ci.yml + token: ${{ secrets.GITHUB_TOKEN }} static-checks: timeout-minutes: 60 - name: "Static checks" + name: "Static checks: no pylint" runs-on: ubuntu-latest - needs: - - cancel-previous-workflow-run + needs: [cancel-previous-workflow-run] env: MOUNT_SOURCE_DIR_FOR_STATIC_CHECKS: "true" + SKIP: "pylint" steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: '3.7' - name: Cache pre-commit env uses: actions/cache@v2 env: - cache-name: cache-pre-commit-v1 + cache-name: cache-pre-commit-no-pylint-v1 with: 
path: ~/.cache/pre-commit key: ${{ env.cache-name }}-${{ github.job }}-${{ hashFiles('.pre-commit-config.yaml') }} @@ -84,57 +77,92 @@ jobs: run: ./scripts/ci/tools/ci_free_space_on_ci.sh - name: "Build CI image" run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - - name: "Static checks" + - name: "Static checks: no pylint" run: ./scripts/ci/static_checks/ci_run_static_checks.sh - - name: "Cancel workflow on static checks failure" - if: ${{ failure() }} - uses: andymckay/cancel-action@0.2 + + static-checks-pylint: + timeout-minutes: 60 + name: "Static checks: pylint" + runs-on: ubuntu-latest + needs: [cancel-previous-workflow-run] + env: + MOUNT_SOURCE_DIR_FOR_STATIC_CHECKS: "true" + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.7' + - name: Cache pre-commit env + uses: actions/cache@v2 + env: + cache-name: cache-pre-commit-pylint-v1 + with: + path: ~/.cache/pre-commit + key: ${{ env.cache-name }}-${{ github.job }}-${{ hashFiles('.pre-commit-config.yaml') }} + - name: "Free space" + run: ./scripts/ci/tools/ci_free_space_on_ci.sh + - name: "Build CI image" + run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh + - name: "Static checks: pylint" + run: ./scripts/ci/static_checks/ci_run_static_checks.sh pylint + docs: timeout-minutes: 60 name: "Build docs" runs-on: ubuntu-latest - needs: - - cancel-previous-workflow-run + needs: [cancel-previous-workflow-run] steps: - uses: actions/checkout@v2 - name: "Build CI image ${{ matrix.python-version }}" run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - name: "Build docs" run: ./scripts/ci/docs/ci_docs.sh - - name: "Cancel workflow on docs failure" - if: ${{ failure() }} - uses: andymckay/cancel-action@0.2 + - uses: actions/upload-artifact@v2 + name: Upload documentation + if: always() + with: + name: airflow-documentation + path: './files/documentation' prepare-backport-packages: timeout-minutes: 60 name: "Backport packages" runs-on: ubuntu-latest - needs: - - 
cancel-previous-workflow-run + needs: [cancel-previous-workflow-run] env: INSTALL_AIRFLOW_VERSION: "1.10.10" PYTHON_MAJOR_MINOR_VERSION: 3.6 steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: '3.7' - name: "Free space" run: ./scripts/ci/tools/ci_free_space_on_ci.sh - name: "Build CI image ${{ matrix.python-version }}" run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - name: "Prepare & test backport packages" run: "./scripts/ci/backport_packages/ci_prepare_and_test_backport_packages.sh" - - name: "Cancel workflow on backport packages failure" - if: ${{ failure() }} - uses: andymckay/cancel-action@0.2 + - uses: actions/upload-artifact@v2 + name: Upload packages + if: always() + with: + name: > + airflow-backport-packages + path: './files/airflow-packages-*' + - uses: actions/upload-artifact@v2 + name: Upload readmes + if: always() + with: + name: > + airflow-backport-readmes + path: './files/airflow-backport-readme*' trigger-tests: timeout-minutes: 5 name: "Checks if tests should be run" runs-on: ubuntu-latest - needs: - - cancel-previous-workflow-run + needs: [cancel-previous-workflow-run] outputs: run-tests: ${{ steps.trigger-tests.outputs.run-tests }} steps: @@ -174,9 +202,9 @@ jobs: if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request' steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: '3.7' - name: "Free space" run: ./scripts/ci/tools/ci_free_space_on_ci.sh - uses: engineerd/setup-kind@v0.4.0 @@ -190,7 +218,7 @@ jobs: - name: Cache virtualenv for kubernetes testing uses: actions/cache@v2 env: - cache-name: cache-kubernetes-tests-virtualenv-v2 + cache-name: cache-kubernetes-tests-virtualenv-v3 with: path: .build/.kubernetes_venv key: "${{ env.cache-name }}-${{ github.job }}-v1" @@ -198,11 +226,15 @@ jobs: run: 
./scripts/ci/kubernetes/ci_run_kubernetes_tests.sh - uses: actions/upload-artifact@v2 name: Upload KinD logs - # Always run this, even if one of th previous steps failed. if: always() with: - name: 'kind-logs-${{matrix.kube-mode}}-${{matrix.python-version}}-${{matrix.kubernetes-version}}' - path: '/tmp/kind_logs_*' + name: kind-logs-${{matrix.kube-mode}}-${{matrix.python-version}}-${{matrix.kubernetes-version}} + path: /tmp/kind_logs_* + - uses: actions/upload-artifact@v2 + name: Upload artifact for coverage + with: + name: coverage-k8s-${{matrix.kube-mode}}-${{matrix.python-version}}-${{matrix.kubernetes-version}} + path: ./files/coverage.xml tests-postgres: timeout-minutes: 80 @@ -224,15 +256,27 @@ jobs: if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request' steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: '3.7' - name: "Free space" run: ./scripts/ci/tools/ci_free_space_on_ci.sh - name: "Build CI image ${{ matrix.python-version }}" run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - name: "Tests" run: ./scripts/ci/testing/ci_run_airflow_testing.sh + - uses: actions/upload-artifact@v2 + name: Upload airflow logs + if: always() + with: + name: airflow-logs-${{matrix.test-type}}-${{matrix.python-version}}-${{matrix.postgres-version}} + path: './files/airflow_logs*' + - uses: actions/upload-artifact@v2 + name: Upload artifact for coverage + with: + name: > + coverage-postgres-${{matrix.test-type}}-${{matrix.python-version}}-${{matrix.postgres-version}} + path: ./files/coverage.xml tests-mysql: timeout-minutes: 80 @@ -254,15 +298,26 @@ jobs: if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request' steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: '3.7' - name: "Free space" run: 
./scripts/ci/tools/ci_free_space_on_ci.sh - name: "Build CI image ${{ matrix.python-version }}" run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - name: "Tests" run: ./scripts/ci/testing/ci_run_airflow_testing.sh + - uses: actions/upload-artifact@v2 + name: Upload airflow logs + if: always() + with: + name: airflow-logs-${{matrix.test-type}}-${{matrix.python-version}}-${{matrix.mysql-version}} + path: './files/airflow_logs*' + - uses: actions/upload-artifact@v2 + name: Upload artifact for coverage + with: + name: coverage-mysql-${{matrix.test-type}}-${{matrix.python-version}}-${{matrix.mysql-version}} + path: ./files/coverage.xml tests-sqlite: timeout-minutes: 80 @@ -277,24 +332,35 @@ jobs: env: BACKEND: sqlite PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} - TEST_TYPE: ${{ matrix.test-type }} RUN_TESTS: "true" + TEST_TYPE: ${{ matrix.test-type }} if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request' steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: '3.7' - name: "Free space" run: ./scripts/ci/tools/ci_free_space_on_ci.sh - name: "Build CI image ${{ matrix.python-version }}" run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - name: "Tests" run: ./scripts/ci/testing/ci_run_airflow_testing.sh + - uses: actions/upload-artifact@v2 + name: Upload airflow logs + if: always() + with: + name: airflow-logs-${{matrix.test-type}}-${{matrix.python-version}} + path: './files/airflow_logs*' + - uses: actions/upload-artifact@v2 + name: Upload artifact for coverage + with: + name: coverage-sqlite-${{matrix.test-type}}-${{matrix.python-version}} + path: ./files/coverage.xml tests-quarantined: timeout-minutes: 80 - name: "${{matrix.test-type}}:Pg${{matrix.postgres-version}},Py${{matrix.python-version}}" + name: "Quarantined tests" runs-on: ubuntu-latest continue-on-error: true needs: [trigger-tests] @@ -302,59 +368,104 @@ jobs: 
matrix: python-version: [3.6] postgres-version: [9.6] - test-type: [Quarantined] fail-fast: false env: BACKEND: postgres PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} POSTGRES_VERSION: ${{ matrix.postgres-version }} RUN_TESTS: "true" - TEST_TYPE: ${{ matrix.test-type }} + TEST_TYPE: Quarantined + NUM_RUNS: 10 + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request' steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: '3.7' + - name: "Set issue id for master" + if: github.ref == 'refs/heads/master' + run: | + echo "::set-env name=ISSUE_ID::10118" + - name: "Set issue id for v1-10-stable" + if: github.ref == 'refs/heads/v1-10-stable' + run: | + echo "::set-env name=ISSUE_ID::10127" + - name: "Set issue id for v1-10-test" + if: github.ref == 'refs/heads/v1-10-test' + run: | + echo "::set-env name=ISSUE_ID::10128" - name: "Free space" run: ./scripts/ci/tools/ci_free_space_on_ci.sh - name: "Build CI image ${{ matrix.python-version }}" run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - name: "Tests" run: ./scripts/ci/testing/ci_run_airflow_testing.sh + - uses: actions/upload-artifact@v2 + name: Upload Quarantine test results + if: always() + with: + name: 'quarantined_tests' + path: 'files/test_result.xml' + - uses: actions/upload-artifact@v2 + name: Upload airflow logs + if: always() + with: + name: airflow-logs-quarantined-${{matrix.python-version}}-${{ matrix.postgres-version }} + path: './files/airflow_logs*' + - uses: actions/upload-artifact@v2 + name: Upload artifact for coverage + with: + name: coverage-quarantined-${{matrix.test-type}}-${{matrix.python-version}} + path: ./files/coverage.xml + + upload-coverage: + timeout-minutes: 5 + name: "Upload coverage" + runs-on: ubuntu-latest + continue-on-error: true + needs: + - tests-kubernetes + - tests-postgres + - tests-sqlite + 
- tests-mysql + - tests-quarantined + steps: + - uses: actions/download-artifact@v2 + name: Download all artifacts from the current build + with: + path: ./coverage-files + - name: Removes unnecessary artifacts + run: ls ./coverage-files | grep -v coverage | xargs rm -rf + - uses: codecov/codecov-action@v1 + name: Upload all coverage reports to codecov + with: + directory: ./coverage-files helm-tests: timeout-minutes: 5 name: "Checks: Helm tests" runs-on: ubuntu-latest - needs: - - cancel-previous-workflow-run + needs: [cancel-previous-workflow-run] steps: - uses: actions/checkout@v2 - name: "Helm Tests" run: ./scripts/ci/kubernetes/ci_run_helm_testing.sh - - name: "Cancel workflow on helm-tests failure" - if: ${{ failure() }} - uses: andymckay/cancel-action@0.2 build-prod-image: timeout-minutes: 60 name: "Build prod image Py${{ matrix.python-version }}" runs-on: ubuntu-latest + needs: [cancel-previous-workflow-run] strategy: matrix: python-version: [3.6, 3.7, 3.8] - needs: - - cancel-previous-workflow-run env: PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} steps: - uses: actions/checkout@v2 - name: "Build PROD image ${{ matrix.python-version }}" run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh - - name: "Cancel workflow on build prod image failure" - if: ${{ failure() }} - uses: andymckay/cancel-action@0.2 push-prod-images-to-github-cache: timeout-minutes: 80 @@ -421,7 +532,6 @@ jobs: python-version: [3.6, 3.7, 3.8] fail-fast: false needs: - - cancel-previous-workflow-run - tests-sqlite - tests-mysql - tests-postgres @@ -433,7 +543,7 @@ jobs: github.event_name != 'pull' steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - uses: actions/setup-python@v2 - name: "Free space" run: ./scripts/ci/tools/ci_free_space_on_ci.sh - name: "Build CI image ${{ matrix.python-version }}" @@ -444,7 +554,7 @@ jobs: name: Upload constraint artifacts with: name: 'constraints-${{matrix.python-version}}' - path: 
'files/constraints-${{matrix.python-version}}/constraints-${{matrix.python-version}}.txt' + path: './files/constraints-${{matrix.python-version}}/constraints-${{matrix.python-version}}.txt' constraints-push: timeout-minutes: 10 @@ -533,23 +643,8 @@ jobs: test-openapi-client-generation: name: "Test OpenAPI client generation" runs-on: ubuntu-latest - needs: - - cancel-previous-workflow-run + needs: [cancel-previous-workflow-run] steps: - uses: actions/checkout@v2 - - name: Get workflow id - run: "scripts/ci/cancel/get_workflow_id.sh" - env: - WORKFLOW: openapi - GITHUB_TOKEN: ${{ github.token }} - GITHUB_REPOSITORY: ${{ github.repositoru }} - - name: Cancel workflow ${{ github.workflow }} - uses: styfle/cancel-workflow-action@0.3.2 - with: - workflow_id: ${{ env.WORKFLOW_ID }} - access_token: ${{ github.token }} - name: "Generate client codegen diff" run: ./scripts/ci/openapi/client_codegen_diff.sh - - name: "Cancel workflow on openapi failure" - if: ${{ failure() }} - uses: andymckay/cancel-action@0.2 diff --git a/.github/workflows/quarantined.yaml b/.github/workflows/quarantined.yaml new file mode 100644 index 0000000000000..6faa237f6ad6c --- /dev/null +++ b/.github/workflows/quarantined.yaml @@ -0,0 +1,108 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: Quarantined Build +on: + schedule: + # Run quarantined builds 4 times a day to gather better quarantine stats + - cron: '35 */6 * * *' + +env: + MOUNT_LOCAL_SOURCES: "false" + MOUNT_FILES: "true" + FORCE_ANSWER_TO_QUESTIONS: "yes" + SKIP_CHECK_REMOTE_IMAGE: "true" + SKIP_CI_IMAGE_CHECK: "true" + DB_RESET: "true" + VERBOSE: "true" + UPGRADE_TO_LATEST_CONSTRAINTS: ${{ github.event_name == 'push' || github.event_name == 'schedule' }} + PYTHON_MAJOR_MINOR_VERSION: 3.6 + USE_GITHUB_REGISTRY: "true" + CACHE_IMAGE_PREFIX: ${{ github.repository }} + CACHE_REGISTRY_USERNAME: ${{ github.actor }} + CACHE_REGISTRY_PASSWORD: ${{ secrets.GITHUB_TOKEN }} + +jobs: + + trigger-tests: + timeout-minutes: 5 + name: "Checks if tests should be run" + runs-on: ubuntu-latest + outputs: + run-tests: ${{ steps.trigger-tests.outputs.run-tests }} + steps: + - uses: actions/checkout@v2 + - name: "Check if tests should be run" + run: "./scripts/ci/tools/ci_check_if_tests_should_be_run.sh" + id: trigger-tests + + tests-quarantined: + timeout-minutes: 80 + name: "Quarantined tests" + runs-on: ubuntu-latest + continue-on-error: true + needs: [trigger-tests] + strategy: + matrix: + python-version: [3.6] + postgres-version: [9.6] + fail-fast: false + env: + BACKEND: postgres + PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} + POSTGRES_VERSION: ${{ matrix.postgres-version }} + RUN_TESTS: "true" + TEST_TYPE: Quarantined + NUM_RUNS: 10 + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request' + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.7' + - name: "Set issue id for master" + if: github.ref == 'refs/heads/master' + run: | + echo "::set-env name=ISSUE_ID::10118" + - name: "Set issue id for v1-10-stable" + if: github.ref == 
'refs/heads/v1-10-stable' + run: | + echo "::set-env name=ISSUE_ID::10127" + - name: "Set issue id for v1-10-test" + if: github.ref == 'refs/heads/v1-10-test' + run: | + echo "::set-env name=ISSUE_ID::10128" + - name: "Free space" + run: ./scripts/ci/tools/ci_free_space_on_ci.sh + - name: "Build CI image ${{ matrix.python-version }}" + run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh + - name: "Tests" + run: ./scripts/ci/testing/ci_run_airflow_testing.sh + - uses: actions/upload-artifact@v2 + name: Upload Quarantine test results + if: always() + with: + name: 'quarantined_tests' + path: 'files/test_result.xml' + - uses: actions/upload-artifact@v2 + name: Upload airflow logs + if: always() + with: + name: airflow-logs-quarantined-${{matrix.python-version}}-${{ matrix.postgres-version }} + path: './files/airflow_logs*' diff --git a/.github/workflows/repo_sync.yml b/.github/workflows/repo_sync.yml new file mode 100644 index 0000000000000..7a7e0b0eb0d98 --- /dev/null +++ b/.github/workflows/repo_sync.yml @@ -0,0 +1,17 @@ +# File: .github/workflows/repo-sync.yml +name: Force sync master from apache/airflow +on: + workflow_dispatch: +jobs: + repo-sync: + if: github.repository != 'apache/airflow' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + - name: repo-sync + uses: repo-sync/github-sync@v2 + with: + source_repo: "apache/airflow" + source_branch: "master" + destination_branch: "master" + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b7bacaeafaa44..a4cd756116282 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -111,6 +111,7 @@ repos: name: Add license for all yaml files exclude: ^\.github/.*$ types: [yaml] + files: \.yml$|\.yaml$ args: - --comment-style - "|#|" @@ -194,7 +195,7 @@ repos: - id: lint-openapi name: Lint OpenAPI using speccy language: docker_image - entry: wework/speccy lint + entry: wework/speccy lint -r default -r 
./scripts/ci/speccy_rules/connexion.yml files: ^airflow/api_connexion/openapi/ - id: lint-openapi name: Lint OpenAPI using openapi-spec-validator @@ -323,6 +324,12 @@ repos: entry: "(airflow\\.){0,1}utils\\.dates\\.days_ago" files: \.py$ pass_filenames: true + - id: restrict-start_date-in-default_args-in-example_dags + language: pygrep + name: "'start_date' should not be defined in default_args in example_dags" + entry: "default_args\\s*=\\s*{\\s*(\"|')start_date(\"|')|(\"|')start_date(\"|'):" + files: \.*example_dags.*.py$ + pass_filenames: true - id: check-integrations name: Check if integration list is aligned entry: ./scripts/ci/pre_commit/pre_commit_check_integrations.sh @@ -357,20 +364,12 @@ repos: exclude: ^dev|^backport_packages require_serial: true - id: pylint - name: Run pylint for main sources + name: Run pylint language: system - entry: "./scripts/ci/pre_commit/pre_commit_pylint_main.sh" + entry: "./scripts/ci/pre_commit/pre_commit_pylint.sh" files: \.py$ - exclude: ^tests/.*\.py$|^scripts/.*\.py$|^dev|^backport_packages|^kubernetes_tests + exclude: ^scripts/.*\.py$|^dev|^backport_packages pass_filenames: true - require_serial: true # Pylint tests should be run in one chunk to detect all cycles - - id: pylint-tests - name: Run pylint for tests - language: system - entry: "./scripts/ci/pre_commit/pre_commit_pylint_tests.sh" - files: ^tests/.*\.py$ - pass_filenames: true - require_serial: true - id: flake8 name: Run flake8 language: system diff --git a/CI.rst b/CI.rst index 41d22fb620118..2844a5c3c20ce 100644 --- a/CI.rst +++ b/CI.rst @@ -67,8 +67,8 @@ The following components are part of the CI infrastructure CI run types ============ -The following CI Job runs are currently run for Apache Airflow, and each of the runs have different -purpose and context. +The following CI Job run types are currently run for Apache Airflow (run by ci.yaml workflow and +quarantined.yaml workflows) and each of the run types have different purpose and context. 
Pull request run ---------------- @@ -126,7 +126,17 @@ DockerHub when pushing ``v1-10-stable`` manually. All runs consist of the same jobs, but the jobs behave slightly differently or they are skipped in different run categories. Here is a summary of the run categories with regards of the jobs they are running. Those jobs often have matrix run strategy which runs several different variations of the jobs -(with different Backend type / Python version, type of the tests to run for example) +(with different Backend type / Python version, type of the tests to run for example). The following chapter +describes the workflows that execute for each run. + +Workflows +========= + +CI Build Workflow +----------------- + +This workflow is a regular workflow that performs the regular checks - none of the jobs should fail. +The tests to run do not contain quarantined tests. +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ | Job | Description | Pull Request Run | Direct Push/Merge Run | Scheduled Run | @@ -148,8 +158,6 @@ Those jobs often have matrix run strategy which runs several different variation +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ | Tests Kubernetes | Run Kubernetes test | Yes (if tests-triggered) | Yes | Yes * | +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ -| 
Quarantined tests | Those are tests that are flaky and we need to fix them | Yes (if tests-triggered) | Yes | Yes * | -+---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ | Test OpenAPI client gen | Tests if OpenAPIClient continues to generate | Yes | Yes | Yes * | +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ | Helm tests | Runs tests for the Helm chart | Yes | Yes | Yes * | @@ -164,3 +172,48 @@ Those jobs often have matrix run strategy which runs several different variation +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ | Tag Repo nightly | Tags the repository with nightly tagIt is a lightweight tag that moves nightly | - | - | Yes. Triggers DockerHub build for public registry | +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ + +Quarantined build workflow +-------------------------- + +This workflow runs only quarantined tests. Those tests do not fail the build even if some tests fail (only if +the whole pytest execution fails). Instead this workflow updates one of the issues where we keep status +of quarantined tests. 
Once the test succeeds in NUM_RUNS subsequent runs, it is marked as stable and +can be removed from quarantine. You can read more about quarantine in ``_ + +The issues are only updated if the test is run as direct push or scheduled run and only in the +``apache/airflow`` repository - so that the issues are not updated in forks. + +The issues that gets updated are different for different branches: + +* master: `Quarantine tests master `_ +* v1-10-stable: `Quarantine tests v1-10-stable `_ +* v1-10-test: `Quarantine tests v1-10-test `_ + ++---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ +| Job | Description | Pull Request Run | Direct Push/Merge Run | Scheduled Run | ++===========================+================================================================================================================+====================================+=================================+======================================================================+ +| Cancel previous workflow | Cancels the previously running workflow run if there is one running | Yes | Yes | Yes * | ++---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ +| Trigger tests | Checks if tests should be triggered | Yes | Yes | Yes * | ++---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ +| Quarantined tests | Those are 
tests that are flaky and we need to fix them | Yes (if tests-triggered) | Yes (Updates quarantine issue) | Yes * (updates quarantine issue) | ++---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+ + +Cancel other workflow runs workflow +----------------------------------- + +This workflow is run only on schedule (every 5 minutes) it's only purpose is to cancel other running +``CI Build`` workflows if important jobs failed in those runs. This is to save runners for other runs +in case we know that the build will not succeed anyway without some basic fixes to static checks or +documentation - effectively implementing missing "fail-fast" (on a job level) in Github Actions +similar to fail-fast in matrix strategy. + +The jobs that are considered as "fail-fast" are: + +* Static checks +* Docs +* Prepare Backport packages +* Helm tests +* Build Prod Image +* TTest OpenAPI client gen diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000..6a4a918a29bf4 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,24 @@ + + +# Contributor Covenant Code of Conduct + +The Apache Airflow project follows the [Apache Software Foundation code of conduct](https://www.apache.org/foundation/policies/conduct.html). + +If you observe behavior that violates those rules please follow the [ASF reporting guidelines](https://www.apache.org/foundation/policies/conduct#reporting-guidelines). 
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index cb5f45a7ab992..b722d8e030f9a 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -84,44 +84,163 @@ If you are proposing a new feature: - Remember that this is a volunteer-driven project, and that contributions are welcome :) -Documentation -============= +Contribution Workflow +===================== -The latest API documentation is usually available -`here `__. +Typically, you start your first contribution by reviewing open tickets +at `GitHub issues `__. -To generate a local version: +If you create pull-request, you don't have to create an issue first, but if you want, you can do it. +Creating an issue will allow you to collect feedback or share plans with other people. -1. Set up an Airflow development environment. +For example, you want to have the following sample ticket assigned to you: +`#7782: Add extra CC: to the emails sent by Airflow `_. -2. Install the ``doc`` extra. +In general, your contribution includes the following stages: -.. code-block:: bash +.. image:: images/workflow.png + :align: center + :alt: Contribution Workflow - pip install -e '.[doc]' +1. Make your own `fork `__ of + the Apache Airflow `main repository `__. +2. Create a `local virtualenv `_, + initialize the `Breeze environment `__, and + install `pre-commit framework `__. + If you want to add more changes in the future, set up your fork and enable Github Actions. -3. Generate and serve the documentation as follows: +3. Join `devlist `__ + and set up a `Slack account `__. + +4. Make the change and create a `Pull Request from your fork `__. + +5. Ping @ #development slack, comment @people. Be annoying. Be considerate. + +Step 1: Fork the Apache Airflow Repo +------------------------------------ +From the `apache/airflow `_ repo, +`create a fork `_: + +.. 
image:: images/fork.png + :align: center + :alt: Creating a fork + + +Step 2: Configure Your Environment +---------------------------------- +Configure the Docker-based Breeze development environment and run tests. + +You can use the default Breeze configuration as follows: + +1. Install the latest versions of the Docker Community Edition + and Docker Compose and add them to the PATH. + +2. Enter Breeze: ``./breeze`` + + Breeze starts with downloading the Airflow CI image from + the Docker Hub and installing all required dependencies. + +3. Enter the Docker environment and mount your local sources + to make them immediately visible in the environment. + +4. Create a local virtualenv, for example: .. code-block:: bash - cd docs - ./build - ./start_doc_server.sh + mkvirtualenv myenv --python=python3.6 -.. note:: - The docs build script ``build`` requires Python 3.6 or greater. +5. Initialize the created environment: -**Known issues:** +.. code-block:: bash -If you are creating a new directory for new integration in the ``airflow.providers`` package, -you should also update the ``docs/autoapi_templates/index.rst`` file. + ./breeze --initialize-local-virtualenv -If you are creating a ``hooks``, ``sensors``, ``operators`` directory in -the ``airflow.providers`` package, you should also update -the ``docs/operators-and-hooks-ref.rst`` file. +6. Open your IDE (for example, PyCharm) and select the virtualenv you created + as the project's default virtualenv in your IDE. + +Step 3: Connect with People +--------------------------- -If you are creating ``example_dags`` directory, you need to create ``example_dags/__init__.py`` with Apache license or copy another ``__init__.py`` file that contains the necessary license. 
+For effective collaboration, make sure to join the following Airflow groups: + +- Mailing lists: + + - Developer’s mailing list ``_ + (quite substantial traffic on this list) + + - All commits mailing list: ``_ + (very high traffic on this list) + + - Airflow users mailing list: ``_ + (reasonably small traffic on this list) + +- `Issues on GitHub `__ + +- `Slack (chat) `__ + +Step 4: Prepare PR +------------------ + +1. Update the local sources to address the issue. + + For example, to address this example issue, do the following: + + * Read about `email configuration in Airflow `__. + + * Find the class you should modify. For the example github issue, + this is `email.py `__. + + * Find the test class where you should add tests. For the example ticket, + this is `test_email.py `__. + + * Make sure your fork's master is synced with Apache Airflow's master before you create a branch. See + `How to sync your fork <#how-to-sync-your-fork>`_ for details. + + * Create a local branch for your development. Make sure to use latest + ``apache/master`` as base for the branch. See `How to Rebase PR <#how-to-rebase-pr>`_ for some details + on setting up the ``apache`` remote. Note, some people develop their changes directly in their own + ``master`` branches - this is OK and you can make PR from your master to ``apache/master`` but we + recommend to always create a local branch for your development. This allows you to easily compare + changes, have several changes that you work on at the same time and many more. + If you have ``apache`` set as remote then you can make sure that you have latest changes in your master + by ``git pull apache master`` when you are in the local ``master`` branch. If you have conflicts and + want to override your locally changed master you can override your local changes with + ``git fetch apache; git reset --hard apache/master``. + + * Modify the class and add necessary code and unit tests. 
+ + * Run the unit tests from the `IDE `__ + or `local virtualenv `__ as you see fit. + + * Run the tests in `Breeze `__. + + * Run and fix all the `static checks `__. If you have + `pre-commits installed `__, + this step is automatically run while you are committing your code. If not, you can do it manually + via ``git add`` and then ``pre-commit run``. + +2. Rebase your fork, squash commits, and resolve all conflicts. See `How to rebase PR <#how-to-rebase-pr>`_ + if you need help with rebasing your change. Remember to rebase often if your PR takes a lot of time to + review/fix. This will make rebase process much easier and less painful and the more often you do it, + the more comfortable you will feel doing it. + +3. Re-run static code checks again. + +4. Create a pull request with the following title for the sample ticket: + ``[AIRFLOW-5934] Added extra CC: field to the Airflow emails.`` + +Make sure to follow other PR guidelines described in `this document <#pull-request-guidelines>`_. + +Step 5: Pass PR Review +---------------------- + +.. image:: images/review.png + :align: center + :alt: PR Review + +Note that committers will use **Squash and Merge** instead of **Rebase and Merge** +when merging PRs and your commit will be squashed to single commit. Pull Request Guidelines ======================= @@ -458,6 +577,46 @@ snowflake slack .. END PACKAGE DEPENDENCIES HERE +Documentation +============= + +The latest API documentation (for the master branch) is usually available +`here `__. + +To generate a local version: + +1. Set up an Airflow development environment. + +2. Install the ``doc`` extra. + +.. code-block:: bash + + pip install -e '.[doc]' + + +3. Generate and serve the documentation as follows: + +.. code-block:: bash + + cd docs + ./build + ./start_doc_server.sh + +.. note:: + The docs build script ``build`` requires Python 3.6 or greater. 
+ +**Known issues:** + +If you are creating a new directory for new integration in the ``airflow.providers`` package, +you should also update the ``docs/autoapi_templates/index.rst`` file. + +If you are creating new ``hooks``, ``sensors``, ``operators`` directory in +the ``airflow.providers`` package, you should also update +the ``docs/operators-and-hooks-ref.rst`` file. + +If you are creating ``example_dags`` directory, you need to create ``example_dags/__init__.py`` with Apache +license or copy another ``__init__.py`` file that contains the necessary license. + Static code checks ================== @@ -509,27 +668,8 @@ If this function is designed to be called by "end-users" (i.e. DAG authors) then ... # You SHOULD not commit the session here. The wrapper will take care of commit()/rollback() if exception -Test Infrastructure -=================== - -We support the following types of tests: - -* **Unit tests** are Python tests launched with ``pytest``. - Unit tests are available both in the `Breeze environment `_ - and `local virtualenv `_. - -* **Integration tests** are available in the Breeze development environment - that is also used for Airflow's CI tests. Integration test are special tests that require - additional services running, such as Postgres, Mysql, Kerberos, etc. - -* **System tests** are automatic tests that use external systems like - Google Cloud Platform. These tests are intended for an end-to-end DAG execution. - -For details on running different types of Airflow tests, see `TESTING.rst `_. - - Naming Conventions for provider packages -======================================== +---------------------------------------- In Airflow 2.0 we standardized and enforced naming for provider packages, modules and classes. 
those rules (introduced as AIP-21) were not only introduced but enforced using automated checks @@ -557,18 +697,18 @@ The rules are as follows: * secrets -> secret backends are stored here * transfers -> transfer operators are stored here -* Module names do not contain word "hooks" , "operators" etc. The right type comes from +* Module names do not contain word "hooks", "operators" etc. The right type comes from the package. For example 'hooks.datastore' module contains DataStore hook and 'operators.datastore' contains DataStore operators. * Class names contain 'Operator', 'Hook', 'Sensor' - for example DataStoreHook, DataStoreExportOperator -* Operator name usually follows the convention: Operator +* Operator name usually follows the convention: ``Operator`` (BigQueryExecuteQueryOperator) is a good example * Transfer Operators are those that actively push data from one service/provider and send it to another service (might be for the same or another provider). This usually involves two hooks. The convention - for those ToOperator. They are not named *TransferOperator nor *Transfer. + for those ``ToOperator``. They are not named *TransferOperator nor *Transfer. * Operators that use external service to perform transfer (for example CloudDataTransferService operators are not placed in "transfers" package and do not have to follow the naming convention for @@ -582,14 +722,32 @@ The rules are as follows: * For Cloud Providers or Service providers that usually means that the transfer operators should land at the "target" side of the transfer -* Secret Backend name follows the convention: Backend. +* Secret Backend name follows the convention: ``Backend``. -* Tests are grouped in parallel packages under "tests.providers" top level package. Module name is usually - "test_.py', +* Tests are grouped in parallel packages under "tests.providers" top level package. 
Module name is usually + ``test_.py``, * System tests (not yet fully automated but allowing to run e2e testing of particular provider) are named with _system.py suffix. +Test Infrastructure +=================== + +We support the following types of tests: + +* **Unit tests** are Python tests launched with ``pytest``. + Unit tests are available both in the `Breeze environment `_ + and `local virtualenv `_. + +* **Integration tests** are available in the Breeze development environment + that is also used for Airflow's CI tests. Integration test are special tests that require + additional services running, such as Postgres, Mysql, Kerberos, etc. + +* **System tests** are automatic tests that use external systems like + Google Cloud Platform. These tests are intended for an end-to-end DAG execution. + +For details on running different types of Airflow tests, see `TESTING.rst `_. + Metadata Database Updates ========================= @@ -707,161 +865,18 @@ commands: # Check JS code in .js and .html files, report any errors/warnings and fix them if possible yarn run lint:fix -Contribution Workflow Example -============================== - -Typically, you start your first contribution by reviewing open tickets -at `GitHub issues `__. - -If you create pull-request, you don't have to create an issue first, but if you want, you can do it. -Creating an issue will allow you to collect feedback or share plans with other people. - -For example, you want to have the following sample ticket assigned to you: -`#7782: Add extra CC: to the emails sent by Aiflow `_. - -In general, your contribution includes the following stages: - -.. image:: images/workflow.png - :align: center - :alt: Contribution Workflow - -1. Make your own `fork `__ of - the Apache Airflow `main repository `__. - -2. Create a `local virtualenv `_, - initialize the `Breeze environment `__, and - install `pre-commit framework `__. 
- If you want to add more changes in the future, set up your fork and enable Github Actions. - -3. Join `devlist `__ - and set up a `Slack account `__. - -4. Make the change and create a `Pull Request from your fork `__. - -5. Ping @ #development slack, comment @people. Be annoying. Be considerate. - -Step 1: Fork the Apache Repo ----------------------------- -From the `apache/airflow `_ repo, -`create a fork `_: - -.. image:: images/fork.png - :align: center - :alt: Creating a fork - - -Step 2: Configure Your Environment ----------------------------------- -Configure the Docker-based Breeze development environment and run tests. - -You can use the default Breeze configuration as follows: - -1. Install the latest versions of the Docker Community Edition - and Docker Compose and add them to the PATH. - -2. Enter Breeze: ``./breeze`` - - Breeze starts with downloading the Airflow CI image from - the Docker Hub and installing all required dependencies. - -3. Enter the Docker environment and mount your local sources - to make them immediately visible in the environment. - -4. Create a local virtualenv, for example: - -.. code-block:: bash - - mkvirtualenv myenv --python=python3.6 - -5. Initialize the created environment: - -.. code-block:: bash - - ./breeze --initialize-local-virtualenv - -6. Open your IDE (for example, PyCharm) and select the virtualenv you created - as the project's default virtualenv in your IDE. 
This will force-push the master from apache/airflow to the master in your fork. Note that in case you
modified the master in your fork, you might lose those changes.
This allows you to easily compare - changes, have several changes that you work on at the same time and many more. - If you have ``apache`` set as remote then you can make sure that you have latest changes in your master - by ``git pull apache master`` when you are in the local ``master`` branch. If you have conflicts and - want to override your locally changed master you can override your local changes with - ``git fetch apache; git reset --hard apache/master``. - - * Modify the class and add necessary code and unit tests. - - * Run the unit tests from the `IDE `__ - or `local virtualenv `__ as you see fit. - - * Run the tests in `Breeze `__. - - * Run and fix all the `static checks `__. If you have - `pre-commits installed `__, - this step is automatically run while you are committing your code. If not, you can do it manually - via ``git add`` and then ``pre-commit run``. - -2. Rebase your fork, squash commits, and resolve all conflicts. See `How to rebase PR <#how-to-rebase-pr>`_ - if you need help with rebasing your change. Remember to rebase often if your PR takes a lot of time to - review/fix. This will make rebase process much easier and less painful - and the more often you do it, - the more comfortable you will feel doing it. - -3. Re-run static code checks again. - -4. Create a pull request with the following title for the sample ticket: - ``[AIRFLOW-5934] Added extra CC: field to the Airflow emails.`` - -Make sure to follow other PR guidelines described in `this document <#pull-request-guidelines>`_. - - -Step 5: Pass PR Review ----------------------- - -.. image:: images/review.png - :align: center - :alt: PR Review - -Note that committers will use **Squash and Merge** instead of **Rebase and Merge** -when merging PRs and your commit will be squashed to single commit. How to rebase PR ================ @@ -874,7 +889,6 @@ author of the change. 
It also produces a "single-line" series of commits in mast makes it much easier to understand what was going on and to find reasons for problems (it is especially useful for "bisecting" when looking for a commit that introduced some bugs. - First of all - you can read about rebase workflow here: `Merging vs. rebasing `_ - this is an excellent article that describes all ins/outs of rebase. I recommend reading it and keeping it as reference. diff --git a/IMAGES.rst b/IMAGES.rst index 6cdc3b2248e80..e403c915eaf90 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -649,7 +649,7 @@ The PROD image entrypoint works as follows: * In case the user is not "airflow" (with undefined user id) and the group id of the user is set to 0 (root), then the user is dynamically added to /etc/passwd at entry using USER_NAME variable to define the user name. This is in order to accommodate the - `OpenShift Guidelines`_ + `OpenShift Guidelines `_ * If ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is passed to the container and it is either mysql or postgres SQL alchemy connection, then the connection is checked and the script waits until the database is reachable. 
* We provide pre-built released versions of the production image in PyPI built from released
sources of Apache Airflow - shortly after release.
Those backport packages are going to be released more frequently than main Airflow 1.10.* releases.
[Enigma](https://www.enigma.com) [[@hydrosquall](https://github.com/hydrosquall)] @@ -591,6 +594,7 @@ Currently **officially** using Airflow: 1. [Thinking Machines](https://thinkingmachin.es) [[@marksteve](https://github.com/marksteve)] 1. [Thinknear](https://www.thinknear.com/) [[@d3cay1](https://github.com/d3cay1), [@ccson](https://github.com/ccson), & [@ababian](https://github.com/ababian)] 1. [ThoughtWorks](https://www.thoughtworks.com/) [[@sann3](https://github.com/sann3)] +1. [ThredUP](https://www.thredup.com/) [[@kosteev](https://github.com/kosteev)] 1. [Thumbtack](https://www.thumbtack.com/) [[@kamalacharya](https://github.com/kamalacharya), [@dwjoss](https://github.com/dwjoss)] 1. [Tictail](https://tictail.com/) 1. [Tile](https://tile.com/) [[@ranjanmanish](https://github.com/ranjanmanish)] @@ -650,5 +654,5 @@ Yes! Be sure to abide by the Apache Foundation [trademark policies](https://www. ## Links - [Documentation](https://airflow.apache.org/docs/stable/) -- [Chat](https://apache-airflow-slack.herokuapp.com/) +- [Chat](https://s.apache.org/airflow-slack) - [More](https://cwiki.apache.org/confluence/display/AIRFLOW/Airflow+Links) diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index 42522c37da3c6..2c4d304c972b7 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -230,13 +230,11 @@ To fix a pylint issue, do the following: 1. Remove module/modules from the `scripts/ci/static_checks/pylint_todo.txt `__. -2. Run `scripts/ci/static_checks/ci_pylint_main.sh `__ and - `scripts/ci/ci_pylint_tests.sh `__. +2. Run `scripts/ci/static_checks/ci_pylint.sh `__. 3. Fix all the issues reported by pylint. -4. Re-run `scripts/ci/static_checks/ci_pylint_main.sh `__ and - `scripts/ci/ci_pylint_tests.sh `__. +4. Re-run `scripts/ci/static_checks/ci_pylint.sh `__. 5. If you see "success", submit a PR following `Pull Request guidelines <#pull-request-guidelines>`__. 
The 2.0 release of Airflow is a significant upgrade, and includes substantial major changes,
and some of them may be breaking. Existing code written for earlier versions of this project may require updates
to use this version. Sometimes necessary configuration changes are also required.
This document describes the changes that have been made, and what you need to do to update your usage.
This is because it has items specific to `google cloud`. +### Major changes -### WasbTaskHandler has been moved -The `WasbTaskHandler` class from `airflow.utils.log.wasb_task_handler` has been moved to -`airflow.providers.microsoft.azure.log.wasb_task_handler`. This is because it has items specific to `azure`. +This section describes the major changes that have been made in this release. -### StackdriverTaskHandler has been moved -The `StackdriverTaskHandler` class from `airflow.utils.log.stackdriver_task_handler` has been moved to -`airflow.providers.google.cloud.log.stackdriver_task_handler`. This is because it has items specific to `google cloud`. +#### Python 2 support is going away -### S3TaskHandler has been moved -The `S3TaskHandler` class from `airflow.utils.log.s3_task_handler` has been moved to -`airflow.providers.amazon.aws.log.s3_task_handler`. This is because it has items specific to `aws`. +> WARNING: Breaking change -### ElasticsearchTaskHandler has been moved -The `ElasticsearchTaskHandler` class from `airflow.utils.log.es_task_handler` has been moved to -`airflow.providers.elasticsearch.log.es_task_handler`. This is because it has items specific to `elasticsearch`. +Airflow 1.10 will be the last release series to support Python 2. Airflow 2.0.0 will only support Python 3.6 and up. -### CloudwatchTaskHandler has been moved -The `CloudwatchTaskHandler` class from `airflow.utils.log.cloudwatch_task_handler` has been moved to -`airflow.providers.amazon.aws.log.cloudwatch_task_handler`. This is because it has items specific to `aws`. +If you have a specific task that still requires Python 2 then you can use the PythonVirtualenvOperator for this. -### SendGrid emailer has been moved -Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib -package was supported by the community. 
The project was passed to the Apache community and currently the -entire code is maintained by the community, so now the division has no justification, and it is only due -to historical reasons. +#### Drop legacy UI in favor of FAB RBAC UI -To clean up, the `send_mail` function from the `airflow.contrib.utils.sendgrid` module has been moved. +> WARNING: Breaking change + +Previously we were using two versions of UI, which were hard to maintain as we need to implement/update the same feature +in both versions. With this release we've removed the older UI in favor of Flask App Builder RBAC UI. No need to set the +RBAC UI explicitly in the configuration now as this is the only default UI. We did it to avoid +the huge maintenance burden of two independent user interfaces + +Please note that that custom auth backends will need re-writing to target new FAB based UI. + +As part of this change, a few configuration items in `[webserver]` section are removed and no longer applicable, +including `authenticate`, `filter_by_owner`, `owner_mode`, and `rbac`. + +Before upgrading to this release, we recommend activating the new FAB RBAC UI. For that, you should set +the `rbac` options in `[webserver]` in the `airflow.cfg` file to `true` -If your configuration file looks like this: -```ini -[email] -email_backend = airflow.contrib.utils.sendgrid.send_email -``` -It should look like this now: ```ini -[email] -email_backend = airflow.providers.sendgrid.utils.emailer.send_email +[webserver] +rbac = true ``` -The old configuration still works but can be abandoned. +In order to login to the interface, you need to create an administrator account. +``` +airflow create_user \ + --role Admin \ + --username admin \ + --firstname FIRST_NAME \ + --lastname LAST_NAME \ + --email EMAIL@example.org +``` -### Weekday enum has been moved -Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib -package was supported by the community. 
The project was passed to the Apache community and currently the -entire code is maintained by the community, so now the division has no justification, and it is only due -to historical reasons. +If you have already installed Airflow 2.0, you can create a user with the command `airflow users create`. +You don't need to make changes to the configuration file as the FAB RBAC UI is +the only supported UI. +``` +airflow users create \ + --role Admin \ + --username admin \ + --firstname FIRST_NAME \ + --lastname LAST_NAME \ + --email EMAIL@example.org +``` -To clean up, `Weekday` enum has been moved from `airflow.contrib.utils` into `airflow.utils` module. +#### Changes to import paths -### airflow.contrib.utils.log has been moved Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib package was supported by the community. The project was passed to the Apache community and currently the entire code is maintained by the community, so now the division has no justification, and it is only due -to historical reasons. - -To clean up, modules in `airflow.contrib.utils.log` have been moved into `airflow.utils.log` -this includes: -* `TaskHandlerWithCustomFormatter` class +to historical reasons. In Airflow 2.0, we want to organize packages and move integrations +with third party services to the ``airflow.providers`` package. + +All changes made are backward compatible, but if you use the old import paths you will +see a deprecation warning. The old import paths can be abandoned in the future. + + +### Migration Guide from Experimental API to Stable API v1 +In Airflow 2.0, we added the new REST API. Experimental API still works, but support may be dropped in the future. +If your application is still using the experimental API, you should consider migrating to the stable API. + +The stable API exposes many endpoints available through the webserver. 
Here are the +differences between the two endpoints that will help you migrate from the +experimental REST API to the stable REST API. + +#### Base Endpoint +The base endpoint for the stable API v1 is ``/api/v1/``. You must change the +experimental base endpoint from ``/api/experimental/`` to ``/api/v1/``. +The table below shows the differences: + +| Purpose | Experimental REST API Endpoint | Stable REST API Endpoint | +|-----------------------------------|----------------------------------------------------------------------------------|--------------------------------------------------------------------------------| +| Create a DAGRuns(POST) | /api/experimental/dags//dag_runs | /api/v1/dags/{dag_id}/dagRuns | +| List DAGRuns(GET) | /api/experimental/dags//dag_runs | /api/v1/dags/{dag_id}/dagRuns | +| Check Health status(GET) | /api/experimental/test | /api/v1/health | +| Task information(GET) | /api/experimental/dags//tasks/ | /api/v1//dags/{dag_id}/tasks/{task_id} | +| TaskInstance public variable(GET) | /api/experimental/dags//dag_runs//tasks/ | /api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id} | +| Pause DAG(PATCH) | /api/experimental/dags//paused/ | /api/v1/dags/{dag_id} | +| Information of paused DAG(GET) | /api/experimental/dags//paused | /api/v1/dags/{dag_id} | +| Latest DAG Runs(GET) | /api/experimental/latest_runs | /api/v1/dags/{dag_id}/dagRuns | +| Get all pools(GET) | /api/experimental/pools | /api/v1/pools | +| Create a pool(POST) | /api/experimental/pools | /api/v1/pools | +| Delete a pool(DELETE) | /api/experimental/pools/ | /api/v1/pools/{pool_name} | +| DAG Lineage(GET) | /api/experimental/lineage/// | /api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries | + +#### Note +This endpoint ``/api/v1/dags/{dag_id}/dagRuns`` also allows you to filter dag_runs with parameters such as ``start_date``, ``end_date``, ``execution_date`` etc in the query string. 
+Therefore the operation previously performed by this endpoint
+
+    /api/experimental/dags//dag_runs/
+
+can now be handled with filter parameters in the query string.
+Getting information about latest runs can be accomplished with the help of
+filters in the query string of this endpoint (``/api/v1/dags/{dag_id}/dagRuns``). Please check the Stable API
+reference documentation for more information.
+
+
+### Changes to Exception handling for DAG callbacks
+
+Exceptions from DAG callbacks used to crash the scheduler. In order to make
+the scheduler more robust, we have changed this behavior to log the exception
+instead. On top of that, a new `dag.callback_exceptions` counter metric has
+been added to help better monitor callback exceptions.
+
+
+### CLI changes in Airflow 2.0
+
+The Airflow CLI has been organized so that related commands are grouped together as subcommands,
+which means that if you use these commands in your scripts, you have to make changes to them.
+
+This section describes the changes that have been made, and what you need to do to update your script.
+
+The ability to manipulate users from the command line has been changed. ``airflow create_user``, ``airflow delete_user``
+ and ``airflow list_users`` have been grouped into a single command `airflow users` with optional flags `create`, `list` and `delete`.
+
+The `airflow list_dags` command is now `airflow dags list`, `airflow pause` is `airflow dags pause`, etc.
+
+In Airflow 1.10 and 2.0 there is an `airflow config` command but there is a difference in behavior. In Airflow 1.10,
+it prints all config options while in Airflow 2.0, it's a command group. `airflow config` is now `airflow config list`.
+You can check other options by running the command `airflow config --help`.
+
+For a complete list of updated CLI commands, see https://airflow.apache.org/cli.html.
+
+You can learn about the commands by running ``airflow --help``.
For example to get help about the ``celery`` group command, +you have to run the help command: ``airflow celery --help``. + +| Old command | New command | Group | +|-----------------------------|------------------------------------|--------------------| +| ``airflow worker`` | ``airflow celery worker`` | ``celery`` | +| ``airflow flower`` | ``airflow celery flower`` | ``celery`` | +| ``airflow trigger_dag`` | ``airflow dags trigger`` | ``dags`` | +| ``airflow delete_dag`` | ``airflow dags delete`` | ``dags`` | +| ``airflow show_dag`` | ``airflow dags show`` | ``dags`` | +| ``airflow list_dag`` | ``airflow dags list`` | ``dags`` | +| ``airflow dag_status`` | ``airflow dags status`` | ``dags`` | +| ``airflow backfill`` | ``airflow dags backfill`` | ``dags`` | +| ``airflow list_dag_runs`` | ``airflow dags list_runs`` | ``dags`` | +| ``airflow pause`` | ``airflow dags pause`` | ``dags`` | +| ``airflow unpause`` | ``airflow dags unpause`` | ``dags`` | +| ``airflow test`` | ``airflow tasks test`` | ``tasks`` | +| ``airflow clear`` | ``airflow tasks clear`` | ``tasks`` | +| ``airflow list_tasks`` | ``airflow tasks list`` | ``tasks`` | +| ``airflow task_failed_deps``| ``airflow tasks failed_deps`` | ``tasks`` | +| ``airflow task_state`` | ``airflow tasks state`` | ``tasks`` | +| ``airflow run`` | ``airflow tasks run`` | ``tasks`` | +| ``airflow render`` | ``airflow tasks render`` | ``tasks`` | +| ``airflow initdb`` | ``airflow db init`` | ``db`` | +| ``airflow resetdb`` | ``airflow db reset`` | ``db`` | +| ``airflow upgradedb`` | ``airflow db upgrade`` | ``db`` | +| ``airflow checkdb`` | ``airflow db check`` | ``db`` | +| ``airflow shell`` | ``airflow db shell`` | ``db`` | +| ``airflow pool`` | ``airflow pools`` | ``pools`` | +| ``airflow create_user`` | ``airflow users create`` | ``users`` | +| ``airflow delete_user`` | ``airflow users delete`` | ``users`` | +| ``airflow list_users`` | ``airflow users list`` | ``users`` | + + +Example Usage for the ``users`` group: -### 
Deprecated method in Connection - -The connection module has new deprecated methods: - -- `Connection.parse_from_uri` -- `Connection.log_info` -- `Connection.debug_info` +To create a new user: +```bash +airflow users create --username jondoe --lastname doe --firstname jon --email jdoe@apache.org --role Viewer --password test +``` -and one deprecated function: -- `parse_netloc_to_hostname` +To list users: +```bash +airflow users list +``` -Previously, users could create a connection object in two ways +To delete a user: +```bash +airflow users delete --username jondoe ``` -conn_1 = Connection(conn_id="conn_a", uri="mysql://AAA/") -# or -conn_2 = Connection(conn_id="conn_a") -conn_2.parse_uri(uri="mysql://AAA/") + +To add a user to a role: +```bash +airflow users add-role --username jondoe --role Public ``` -Now the second way is not supported. -`Connection.log_info` and `Connection.debug_info` method have been deprecated. Read each Connection field individually or use the -default representation (`__repr__`). +To remove a user from a role: +```bash +airflow users remove-role --username jondoe --role Public +``` -The old method is still works but can be abandoned at any time. The changes are intended to delete method -that are rarely used. +#### Use exactly single character for short option style change in CLI -### BaseOperator uses metaclass +For Airflow short option, use exactly one single character, New commands are available according to the following table: -`BaseOperator` class uses a `BaseOperatorMeta` as a metaclass. This meta class is based on -`abc.ABCMeta`. If your custom operator uses different metaclass then you will have to adjust it. 
+| Old command | New command | +| :----------------------------------------------------| :---------------------------------------------------| +| ``airflow (dags\|tasks\|scheduler) [-sd, --subdir]`` | ``airflow (dags\|tasks\|scheduler) [-S, --subdir]`` | +| ``airflow tasks test [-dr, --dry_run]`` | ``airflow tasks test [-n, --dry-run]`` | +| ``airflow dags backfill [-dr, --dry_run]`` | ``airflow dags backfill [-n, --dry-run]`` | +| ``airflow tasks clear [-dx, --dag_regex]`` | ``airflow tasks clear [-R, --dag-regex]`` | +| ``airflow kerberos [-kt, --keytab]`` | ``airflow kerberos [-k, --keytab]`` | +| ``airflow tasks run [-int, --interactive]`` | ``airflow tasks run [-N, --interactive]`` | +| ``airflow webserver [-hn, --hostname]`` | ``airflow webserver [-H, --hostname]`` | +| ``airflow celery worker [-cn, --celery_hostname]`` | ``airflow celery worker [-H, --celery-hostname]`` | +| ``airflow celery flower [-hn, --hostname]`` | ``airflow celery flower [-H, --hostname]`` | +| ``airflow celery flower [-fc, --flower_conf]`` | ``airflow celery flower [-c, --flower-conf]`` | +| ``airflow celery flower [-ba, --basic_auth]`` | ``airflow celery flower [-A, --basic-auth]`` | +| ``airflow celery flower [-tp, --task_params]`` | ``airflow celery flower [-t, --task-params]`` | +| ``airflow celery flower [-pm, --post_mortem]`` | ``airflow celery flower [-m, --post-mortem]`` | -### Not-nullable conn_type column in connection table +For Airflow long option, use [kebab-case](https://en.wikipedia.org/wiki/Letter_case) instead of [snake_case](https://en.wikipedia.org/wiki/Snake_case) -The `conn_type` column in the `connection` table must contain content. Previously, this rule was enforced -by application logic, but was not enforced by the database schema. 
+| Old option | New option | +| :--------------------------------- | :--------------------------------- | +| ``--task_regex`` | ``--task-regex`` | +| ``--start_date`` | ``--start-date`` | +| ``--end_date`` | ``--end-date`` | +| ``--dry_run`` | ``--dry-run`` | +| ``--no_backfill`` | ``--no-backfill`` | +| ``--mark_success`` | ``--mark-success`` | +| ``--donot_pickle`` | ``--donot-pickle`` | +| ``--ignore_dependencies`` | ``--ignore-dependencies`` | +| ``--ignore_first_depends_on_past`` | ``--ignore-first-depends-on-past`` | +| ``--delay_on_limit`` | ``--delay-on-limit`` | +| ``--reset_dagruns`` | ``--reset-dagruns`` | +| ``--rerun_failed_tasks`` | ``--rerun-failed-tasks`` | +| ``--run_backwards`` | ``--run-backwards`` | +| ``--only_failed`` | ``--only-failed`` | +| ``--only_running`` | ``--only-running`` | +| ``--exclude_subdags`` | ``--exclude-subdags`` | +| ``--exclude_parentdag`` | ``--exclude-parentdag`` | +| ``--dag_regex`` | ``--dag-regex`` | +| ``--run_id`` | ``--run-id`` | +| ``--exec_date`` | ``--exec-date`` | +| ``--ignore_all_dependencies`` | ``--ignore-all-dependencies`` | +| ``--ignore_depends_on_past`` | ``--ignore-depends-on-past`` | +| ``--ship_dag`` | ``--ship-dag`` | +| ``--job_id`` | ``--job-id`` | +| ``--cfg_path`` | ``--cfg-path`` | +| ``--ssl_cert`` | ``--ssl-cert`` | +| ``--ssl_key`` | ``--ssl-key`` | +| ``--worker_timeout`` | ``--worker-timeout`` | +| ``--access_logfile`` | ``--access-logfile`` | +| ``--error_logfile`` | ``--error-logfile`` | +| ``--dag_id`` | ``--dag-id`` | +| ``--num_runs`` | ``--num-runs`` | +| ``--do_pickle`` | ``--do-pickle`` | +| ``--celery_hostname`` | ``--celery-hostname`` | +| ``--broker_api`` | ``--broker-api`` | +| ``--flower_conf`` | ``--flower-conf`` | +| ``--url_prefix`` | ``--url-prefix`` | +| ``--basic_auth`` | ``--basic-auth`` | +| ``--task_params`` | ``--task-params`` | +| ``--post_mortem`` | ``--post-mortem`` | +| ``--conn_uri`` | ``--conn-uri`` | +| ``--conn_type`` | ``--conn-type`` | +| ``--conn_host`` | 
``--conn-host`` | +| ``--conn_login`` | ``--conn-login`` | +| ``--conn_password`` | ``--conn-password`` | +| ``--conn_schema`` | ``--conn-schema`` | +| ``--conn_port`` | ``--conn-port`` | +| ``--conn_extra`` | ``--conn-extra`` | +| ``--use_random_password`` | ``--use-random-password`` | +| ``--skip_serve_logs`` | ``--skip-serve-logs`` | -If you made any modifications to the table directly, make sure you don't have -null in the conn_type column. +#### Remove serve_logs command from CLI -### DAG.create_dagrun accepts run_type and does not require run_id -This change is caused by adding `run_type` column to `DagRun`. +The ``serve_logs`` command has been deleted. This command should be run only by internal application mechanisms +and there is no need for it to be accessible from the CLI interface. -Previous signature: -```python -def create_dagrun(self, - run_id, - state, - execution_date=None, - start_date=None, - external_trigger=False, - conf=None, - session=None): -``` -current: -```python -def create_dagrun(self, - state, - execution_date=None, - run_id=None, - start_date=None, - external_trigger=False, - conf=None, - run_type=None, - session=None): -``` -If user provides `run_id` then the `run_type` will be derived from it by checking prefix, allowed types -: `manual`, `scheduled`, `backfill` (defined by `airflow.utils.types.DagRunType`). +#### dag_state CLI command -If user provides `run_type` and `execution_date` then `run_id` is constructed as -`{run_type}__{execution_data.isoformat()}`. +If the DAGRun was triggered with conf key/values passed in, they will also be printed in the dag_state CLI response +ie. running, {"name": "bob"} +whereas in in prior releases it just printed the state: +ie. running -Airflow should construct dagruns using `run_type` and `execution_date`, creation using -`run_id` is preserved for user actions. 
+#### Deprecating ignore_first_depends_on_past on backfill command and default it to True +When doing backfill with `depends_on_past` dags, users will need to pass `--ignore-first-depends-on-past`. +We should default it as `true` to avoid confusion -### Standardised "extra" requirements +### Database schema changes -We standardised the Extras names and synchronized providers package names with the main airflow extras. +In order to migrate the database, you should use the command `airflow db upgrade`, but in +some cases manual steps are required. -We deprecated a number of extras in 2.0. +#### Unique conn_id in connection table -| Deprecated extras | New extras | -|-------------------|------------------| -| atlas | apache.atlas | -| aws | amazon | -| azure | microsoft.azure | -| cassandra | apache.cassandra | -| druid | apache.druid | -| gcp | google | -| gcp_api | google | -| hdfs | apache.hdfs | -| hive | apache.hive | -| kubernetes | cncf.kubernetes | -| mssql | microsoft.mssql | -| pinot | apache.pinot | -| webhdfs | apache.webhdfs | -| winrm | apache.winrm | - -For example instead of `pip install apache-airflow[atlas]` you should use -`pip install apache-airflow[apache.atlas]` . +Previously, Airflow allowed users to add more than one connection with the same `conn_id` and on access it would choose one connection randomly. This acted as a basic load balancing and fault tolerance technique, when used in conjunction with retries. -The deprecated extras will be removed in 2.1: +This behavior caused some confusion for users, and there was no clear evidence if it actually worked well or not. -### Skipped tasks can satisfy wait_for_downstream +Now the `conn_id` will be unique. If you already have duplicates in your metadata database, you will have to manage those duplicate connections before upgrading the database. -Previously, a task instance with `wait_for_downstream=True` will only run if the downstream task of -the previous task instance is successful. 
Meanwhile, a task instance with `depends_on_past=True` -will run if the previous task instance is either successful or skipped. These two flags are close siblings -yet they have different behavior. This inconsistency in behavior made the API less intuitive to users. -To maintain consistent behavior, both successful or skipped downstream task can now satisfy the -`wait_for_downstream=True` flag. +#### Not-nullable conn_type column in connection table +The `conn_type` column in the `connection` table must contain content. Previously, this rule was enforced +by application logic, but was not enforced by the database schema. -### Use DagRunType.SCHEDULED.value instead of DagRun.ID_PREFIX +If you made any modifications to the table directly, make sure you don't have +null in the conn_type column. -All the run_id prefixes for different kind of DagRuns have been grouped into a single -enum in `airflow.utils.types.DagRunType`. +### Configuration changes -Previously, there were defined in various places, example as `ID_PREFIX` class variables for -`DagRun`, `BackfillJob` and in `_trigger_dag` function. +This release contains many changes that require a change in the configuration of this application or +other application that integrate with it. -Was: +This section describes the changes that have been made, and what you need to do to. -```python ->> from airflow.models.dagrun import DagRun ->> DagRun.ID_PREFIX -scheduled__ -``` +#### airflow.contrib.utils.log has been moved -Replaced by: +Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib +package was supported by the community. The project was passed to the Apache community and currently the +entire code is maintained by the community, so now the division has no justification, and it is only due +to historical reasons. In Airflow 2.0, we want to organize packages and move integrations +with third party services to the ``airflow.providers`` package. 
+ +To clean up, the following packages were moved: +| Old package | New package | +|-|-| +| ``airflow.contrib.utils.log`` | ``airflow.utils.log`` | +| ``airflow.utils.log.gcs_task_handler`` | ``airflow.providers.google.cloud.log.gcs_task_handler`` | +| ``airflow.utils.log.wasb_task_handler`` | ``airflow.providers.microsoft.azure.log.wasb_task_handler`` | +| ``airflow.utils.log.stackdriver_task_handler`` | ``airflow.providers.google.cloud.log.stackdriver_task_handler`` | +| ``airflow.utils.log.s3_task_handler`` | ``airflow.providers.amazon.aws.log.s3_task_handler`` | +| ``airflow.utils.log.es_task_handler`` | ``airflow.providers.elasticsearch.log.es_task_handler`` | +| ``airflow.utils.log.cloudwatch_task_handler`` | ``airflow.providers.amazon.aws.log.cloudwatch_task_handler`` | + +You should update the import paths if you are setting log configurations with the ``logging_config_class`` option. +The old import paths still works but can be abandoned. + +#### SendGrid emailer has been moved +Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib +package was supported by the community. The project was passed to the Apache community and currently the +entire code is maintained by the community, so now the division has no justification, and it is only due +to historical reasons. -```python ->> from airflow.utils.types import DagRunType ->> DagRunType.SCHEDULED.value -scheduled -``` +To clean up, the `send_mail` function from the `airflow.contrib.utils.sendgrid` module has been moved. -### Ability to patch Pool.DEFAULT_POOL_NAME in BaseOperator -It was not possible to patch pool in BaseOperator as the signature sets the default value of pool -as Pool.DEFAULT_POOL_NAME. 
-While using subdagoperator in unittest(without initializing the sqlite db), it was throwing the -following error: +If your configuration file looks like this: +```ini +[email] +email_backend = airflow.contrib.utils.sendgrid.send_email ``` -sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) no such table: slot_pool. +It should look like this now: +```ini +[email] +email_backend = airflow.providers.sendgrid.utils.emailer.send_email ``` -Fix for this, https://github.com/apache/airflow/pull/8587 -### Change signature of BigQueryGetDatasetTablesOperator -Was: -```python -BigQueryGetDatasetTablesOperator(dataset_id: str, dataset_resource: dict, ...) -``` -and now it is: -```python -BigQueryGetDatasetTablesOperator(dataset_resource: dict, dataset_id: Optional[str] = None, ...) -``` +The old configuration still works but can be abandoned. -### Unify `hostname_callable` option in `core` section +#### Unify `hostname_callable` option in `core` section The previous option used a colon(`:`) to split the module from function. Now the dot(`.`) is used. The change aims to unify the format of all options that refer to objects in the `airflow.cfg` file. -### Changes in BigQueryHook -In general all hook methods are decorated with `@GoogleBaseHook.fallback_to_default_project_id` thus -parameters to hook can only be passed via keyword arguments. -- `create_empty_table` method accepts now `table_resource` parameter. If provided all -other parameters are ignored. -- `create_empty_dataset` will now use values from `dataset_reference` instead of raising error -if parameters were passed in `dataset_reference` and as arguments to method. Additionally validation -of `dataset_reference` is done using `Dataset.from_api_repr`. Exception and log messages has been -changed. -- `update_dataset` requires now new `fields` argument (breaking change) -- `delete_dataset` has new signature (dataset_id, project_id, ...) -previous one was (project_id, dataset_id, ...) 
(breaking change)
-- `get_tabledata` returns list of rows instead of API response in dict format. This method is deprecated in
-  favor of `list_rows`. (breaking change)

+#### Custom executors are loaded using full import path

-### Added mypy plugin to preserve types of decorated functions
+In previous versions of Airflow it was possible to use plugins to load custom executors. It is still
+possible, but the configuration has changed. Now you don't have to create a plugin to configure a
+custom executor, but you need to provide the full path to the module in the `executor` option
+in the `core` section. The purpose of this change is to simplify the plugin mechanism and make
+it easier to configure executors.
-Mypy currently doesn't support precise type information for decorated
-functions; see https://github.com/python/mypy/issues/3157 for details.
-To preserve precise type definitions for decorated functions, we now
-include a mypy plugin to preserve precise type definitions for decorated
-functions. To use the plugin, update your setup.cfg:
+If your module was in the path `my_acme_company.executors.MyCustomExecutor` and the plugin was
+called `my_plugin` then your configuration looks like this:
+```ini
+[core]
+executor = my_plugin.MyCustomExecutor
```
-[mypy]
-plugins =
-  airflow.mypy.plugin.decorators
+And now it should look like this:
+```ini
+[core]
+executor = my_acme_company.executors.MyCustomExecutor
```
-### Use project_id argument consistently across GCP hooks and operators
+The old configuration still works but can be abandoned at any time.
-- Changed order of arguments in DataflowHook.start_python_dataflow. Uses
-  with positional arguments may break.
-- Changed order of arguments in DataflowHook.is_job_dataflow_running. Uses
-  with positional arguments may break.
-- Changed order of arguments in DataflowHook.cancel_job. Uses
-  with positional arguments may break.
-- Added optional project_id argument to DataflowCreateJavaJobOperator
-  constructor.
-- Added optional project_id argument to DataflowTemplatedJobStartOperator - constructor. -- Added optional project_id argument to DataflowCreatePythonJobOperator - constructor. +#### Drop plugin support for stat_name_handler -### GCSUploadSessionCompleteSensor signature change +In previous version, you could use plugins mechanism to configure ``stat_name_handler``. You should now use the `stat_name_handler` +option in `[scheduler]` section to achieve the same effect. -To provide more precise control in handling of changes to objects in -underlying GCS Bucket the constructor of this sensor now has changed. +If your plugin looked like this and was available through the `test_plugin` path: +```python +def my_stat_name_handler(stat): + return stat -- Old Behavior: This constructor used to optionally take ``previous_num_objects: int``. -- New replacement constructor kwarg: ``previous_objects: Optional[Set[str]]``. +class AirflowTestPlugin(AirflowPlugin): + name = "test_plugin" + stat_name_handler = my_stat_name_handler +``` +then your `airflow.cfg` file should look like this: +```ini +[scheduler] +stat_name_handler=test_plugin.my_stat_name_handler +``` -Most users would not specify this argument because the bucket begins empty -and the user wants to treat any files as new. +This change is intended to simplify the statsd configuration. -Example of Updating usage of this sensor: -Users who used to call: +#### Logging configuration has been moved to new section -``GCSUploadSessionCompleteSensor(bucket='my_bucket', prefix='my_prefix', previous_num_objects=1)`` +The following configurations have been moved from `[core]` to the new `[logging]` section. 
-Will now call: +* `base_log_folder` +* `remote_logging` +* `remote_log_conn_id` +* `remote_base_log_folder` +* `encrypt_s3_logs` +* `logging_level` +* `fab_logging_level` +* `logging_config_class` +* `colored_console_log` +* `colored_log_format` +* `colored_formatter_class` +* `log_format` +* `simple_log_format` +* `task_log_prefix_template` +* `log_filename_template` +* `log_processor_filename_template` +* `dag_processor_manager_log_location` +* `task_log_reader` -``GCSUploadSessionCompleteSensor(bucket='my_bucket', prefix='my_prefix', previous_num_objects={'.keep'})`` +#### Remove gcp_service_account_keys option in airflow.cfg file -Where '.keep' is a single file at your prefix that the sensor should not consider new. +This option has been removed because it is no longer supported by the Google Kubernetes Engine. The new +recommended service account keys for the Google Cloud Platform management method is +[Workload Identity](https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity). + +#### Fernet is enabled by default + +The fernet mechanism is enabled by default to increase the security of the default installation. In order to +restore the previous behavior, the user must consciously set an empty key in the ``fernet_key`` option of +section ``[core]`` in the ``airflow.cfg`` file. + +At the same time, this means that the `apache-airflow[crypto]` extra-packages are always installed. +However, this requires that your operating system has ``libffi-dev`` installed. + +#### Changes to propagating Kubernetes worker annotations + +`kubernetes_annotations` configuration section has been removed. +A new key `worker_annotations` has been added to existing `kubernetes` section instead. +That is to remove restriction on the character set for k8s annotation keys. +All key/value pairs from `kubernetes_annotations` should now go to `worker_annotations` as a json. I.e. instead of e.g. 
+``` +[kubernetes_annotations] +annotation_key = annotation_value +annotation_key2 = annotation_value2 +``` +it should be rewritten to +``` +[kubernetes] +worker_annotations = { "annotation_key" : "annotation_value", "annotation_key2" : "annotation_value2" } +``` + +#### Remove run_duration +We should not use the `run_duration` option anymore. This used to be for restarting the scheduler from time to time, but right now the scheduler is getting more stable and therefore using this setting is considered bad and might cause an inconsistent state. -### Rename pool statsd metrics +#### Rename pool statsd metrics Used slot has been renamed to running slot to make the name self-explanatory and the code more maintainable. @@ -365,36 +547,44 @@ This means `pool.used_slots.` metric has been renamed to `pool.running_slots.`. The `Used Slots` column in Pools Web UI view has also been changed to `Running Slots`. -### Remove SQL support in base_hook +#### Removal of Mesos Executor -Remove ``get_records`` and ``get_pandas_df`` and ``run`` from base_hook, which only apply for sql like hook, -If want to use them, or your custom hook inherit them, please use ``dbapi_hook`` - -### Changes to SalesforceHook - -Replace parameter ``sandbox`` with ``domain``. According to change in simple-salesforce package +The Mesos Executor is removed from the code base as it was not widely used and not maintained. [Mailing List Discussion on deleting it](https://lists.apache.org/thread.html/daa9500026b820c6aaadeffd66166eae558282778091ebbc68819fb7@%3Cdev.airflow.apache.org%3E). -### Rename parameter name in PinotAdminHook.create_segment +#### Change dag loading duration metric name +Change DAG file loading duration metric from +`dag.loading-duration.` to `dag.loading-duration.`. This is to +better handle the case when a DAG file has multiple DAGs. 
-Rename parameter name from ``format`` to ``segment_format`` in PinotAdminHook function create_segment fro pylint compatible +### Changes to the core operators/hooks -### Rename parameter name in HiveMetastoreHook.get_partitions +We strive to ensure that there are no changes that may affect the end user and your files, but this +release may contain changes that will require changes to your DAG files. -Rename parameter name from ``filter`` to ``partition_filter`` in HiveMetastoreHook function get_partitions for pylint compatible +This section describes the changes that have been made, and what you need to do to update your DAG File, +if you use core operators or any other. -### Remove unnecessary parameter in FTPHook.list_directory +#### BaseSensorOperator to make respect the trigger_rule of downstream tasks -Remove unnecessary parameter ``nlst`` in FTPHook function list_directory for pylint compatible +Previously, BaseSensorOperator with setting soft_fail=True becomes skipped itself +and skips all its downstream tasks unconditionally, when it fails. +The point is not respect the trigger_rule of downstream tasks, when it fails. +In the new behavior, the trigger_rule of downstream tasks are respected. +User can preserve/achieve the original behaviour by setting every downstream task to all_success, +because downstream tasks with trigger_rule all_success (i.e. the default) are skipped +when upstream task is skipped. -### Remove unnecessary parameter in PostgresHook function copy_expert +#### BaseOperator uses metaclass -Remove unnecessary parameter ``open`` in PostgresHook function copy_expert for pylint compatible +`BaseOperator` class uses a `BaseOperatorMeta` as a metaclass. This meta class is based on +`abc.ABCMeta`. If your custom operator uses different metaclass then you will have to adjust it. 
-### Change parameter name in OpsgenieAlertOperator +#### Remove SQL support in base_hook -Change parameter name from ``visibleTo`` to ``visible_to`` in OpsgenieAlertOperator for pylint compatible +Remove ``get_records`` and ``get_pandas_df`` and ``run`` from base_hook, which only apply for sql like hook, +If want to use them, or your custom hook inherit them, please use ``airflow.hooks.dbapi_hook.DbApiHook`` -### Assigning task to a DAG using bitwise shift (bit-shift) operators are no longer supported +#### Assigning task to a DAG using bitwise shift (bit-shift) operators are no longer supported Previously, you could assign a task to a DAG as follows: @@ -412,131 +602,31 @@ with DAG('my_dag'): dummy = DummyOperator(task_id='dummy') ``` -### Deprecating ignore_first_depends_on_past on backfill command and default it to True - -When doing backfill with `depends_on_past` dags, users will need to pass `--ignore-first-depends-on-past`. -We should default it as `true` to avoid confusion - -### Custom executors is loaded using full import path - -In previous versions of Airflow it was possible to use plugins to load custom executors. It is still -possible, but the configuration has changed. Now you don't have to create a plugin to configure a -custom executor, but you need to provide the full path to the module in the `executor` option -in the `core` section. The purpose of this change is to simplify the plugin mechanism and make -it easier to configure executor. - -If your module was in the path `my_acme_company.executors.MyCustomExecutor` and the plugin was -called `my_plugin` then your configuration looks like this - -```ini -[core] -executor = my_plguin.MyCustomExecutor -``` -And now it should look like this: -```ini -[core] -executor = my_acme_company.executors.MyCustomExecutor -``` - -The old configuration is still works but can be abandoned at any time. 
- -### Removed sub-package imports from `airflow/__init__.py` - -The imports `LoggingMixin`, `conf`, and `AirflowException` have been removed from `airflow/__init__.py`. -All implicit references of these objects will no longer be valid. To migrate, all usages of each old path must be -replaced with its corresponding new path. - -| Old Path (Implicit Import) | New Path (Explicit Import) | -|------------------------------|--------------------------------------------------| -| ``airflow.LoggingMixin`` | ``airflow.utils.log.logging_mixin.LoggingMixin`` | -| ``airflow.conf`` | ``airflow.configuration.conf`` | -| ``airflow.AirflowException`` | ``airflow.exceptions.AirflowException`` | - -### Added `airflow dags test` CLI command - -A new command was added to the CLI for executing one full run of a DAG for a given execution date, similar to -`airflow tasks test`. Example usage: - -``` -airflow dags test [dag_id] [execution_date] -airflow dags test example_branch_operator 2018-01-01 -``` - -### Drop plugin support for stat_name_handler - -In previous version, you could use plugins mechanism to configure ``stat_name_handler``. You should now use the `stat_name_handler` -option in `[scheduler]` section to achieve the same effect. - -If your plugin looked like this and was available through the `test_plugin` path: -```python -def my_stat_name_handler(stat): - return stat - -class AirflowTestPlugin(AirflowPlugin): - name = "test_plugin" - stat_name_handler = my_stat_name_handler -``` -then your `airflow.cfg` file should look like this: -```ini -[scheduler] -stat_name_handler=test_plugin.my_stat_name_handler -``` - -This change is intended to simplify the statsd configuration. - -### Move methods from BiqQueryBaseCursor to BigQueryHook +#### Removed deprecated import mechanism -To simplify BigQuery operators (no need of `Cursor`) and standardize usage of hooks within all GCP integration methods from `BiqQueryBaseCursor` -were moved to `BigQueryHook`. 
Using them by from `Cursor` object is still possible due to preserved backward compatibility but they will raise `DeprecationWarning`. -The following methods were moved: +The deprecated import mechanism has been removed so the import of modules becomes more consistent and explicit. -| Old path | New path | -|------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------| -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.cancel_query | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.cancel_query | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_dataset | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_table | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_table | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_external_table | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_external_table | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.delete_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.delete_dataset | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset_tables | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset_tables_list | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables_list | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_datasets_list | 
airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_datasets_list | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_schema | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_schema | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_tabledata | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_tabledata | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.insert_all | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_all | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.patch_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_dataset | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.patch_table | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_table | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.poll_job_complete | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.poll_job_complete | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_copy | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_copy | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_extract | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_extract | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_grant_dataset_view_access | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_grant_dataset_view_access | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_load | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_load | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_query | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_query | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_delete | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_delete | -| 
airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_upsert | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_upsert | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_with_configuration | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_with_configuration | -| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.update_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.update_dataset | +For example: `from airflow.operators import BashOperator` +becomes `from airflow.operators.bash_operator import BashOperator` -### Standardize handling http exception in BigQuery +#### Changes to sensor imports -Since BigQuery is the part of the GCP it was possible to simplify the code by handling the exceptions -by usage of the `airflow.providers.google.common.hooks.base.GoogleBaseHook.catch_http_exception` decorator however it changes -exceptions raised by the following methods: -* `airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_delete` raises `AirflowException` instead of `Exception`. -* `airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_dataset` raises `AirflowException` instead of `ValueError`. -* `airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset` raises `AirflowException` instead of `ValueError`. +Sensors are now accessible via `airflow.sensors` and no longer via `airflow.operators.sensors`. -### Remove airflow.utils.file.TemporaryDirectory +For example: `from airflow.operators.sensors import BaseSensorOperator` +becomes `from airflow.sensors.base_sensor_operator import BaseSensorOperator` -Since Airflow dropped support for Python < 3.5 there's no need to have this custom -implementation of `TemporaryDirectory` because the same functionality is provided by -`tempfile.TemporaryDirectory`. 
+#### Skipped tasks can satisfy wait_for_downstream -Now users instead of `import from airflow.utils.files import TemporaryDirectory` should -do `from tempfile import TemporaryDirectory`. Both context managers provide the same -interface, thus no additional changes should be required. +Previously, a task instance with `wait_for_downstream=True` will only run if the downstream task of +the previous task instance is successful. Meanwhile, a task instance with `depends_on_past=True` +will run if the previous task instance is either successful or skipped. These two flags are close siblings +yet they have different behavior. This inconsistency in behavior made the API less intuitive to users. +To maintain consistent behavior, both successful or skipped downstream task can now satisfy the +`wait_for_downstream=True` flag. -### Chain and cross_downstream moved from helpers to BaseOperator +#### `airflow.utils.helpers.cross_downstream` +#### `airflow.utils.helpers.chain` The `chain` and `cross_downstream` methods are now moved to airflow.models.baseoperator module from `airflow.utils.helpers` module. @@ -564,325 +654,358 @@ from airflow.models.baseoperator import chain from airflow.models.baseoperator import cross_downstream ``` -### Change python3 as Dataflow Hooks/Operators default interpreter +#### `airflow.operators.python.BranchPythonOperator` +`BranchPythonOperator` will now return a value equal to the `task_id` of the chosen branch, +where previously it returned None. Since it inherits from BaseOperator it will do an +`xcom_push` of this value if `do_xcom_push=True`. This is useful for downstream decision-making. -Now the `py_interpreter` argument for DataFlow Hooks/Operators has been changed from python2 to python3. +#### `airflow.sensors.sql_sensor.SqlSensor` -### Logging configuration has been moved to new section +SQLSensor now consistent with python `bool()` function and the `allow_null` parameter has been removed. 
-The following configurations have been moved from `[core]` to the new `[logging]` section. +It will resolve after receiving any value that is casted to `True` with python `bool(value)`. That +changes the previous response receiving `NULL` or `'0'`. Earlier `'0'` has been treated as success +criteria. `NULL` has been treated depending on value of `allow_null`parameter. But all the previous +behaviour is still achievable setting param `success` to `lambda x: x is None or str(x) not in ('0', '')`. -* `base_log_folder` -* `remote_logging` -* `remote_log_conn_id` -* `remote_base_log_folder` -* `encrypt_s3_logs` -* `logging_level` -* `fab_logging_level` -* `logging_config_class` -* `colored_console_log` -* `colored_log_format` -* `colored_formatter_class` -* `log_format` -* `simple_log_format` -* `task_log_prefix_template` -* `log_filename_template` -* `log_processor_filename_template` -* `dag_processor_manager_log_location` -* `task_log_reader` +#### `airflow.operators.dagrun_operator.TriggerDagRunOperator` -### Simplification of CLI commands +The TriggerDagRunOperator now takes a `conf` argument to which a dict can be provided as conf for the DagRun. +As a result, the `python_callable` argument was removed. PR: https://github.com/apache/airflow/pull/6317. -#### Grouped to improve UX of CLI +#### `airflow.operators.python.PythonOperator` -Some commands have been grouped to improve UX of CLI. New commands are available according to the following table: +`provide_context` argument on the PythonOperator was removed. The signature of the callable passed to the PythonOperator is now inferred and argument values are always automatically provided. There is no need to explicitly provide or not provide the context anymore. 
For example: -| Old command | New command | -|---------------------------|------------------------------------| -| ``airflow worker`` | ``airflow celery worker`` | -| ``airflow flower`` | ``airflow celery flower`` | +```python +def myfunc(execution_date): + print(execution_date) -#### Cli use exactly single character for short option style change +python_operator = PythonOperator(task_id='mytask', python_callable=myfunc, dag=dag) +``` -For Airflow short option, use exactly one single character, New commands are available according to the following table: +Notice you don't have to set provide_context=True, variables from the task context are now automatically detected and provided. -| Old command | New command | -| :------------------------------------------------- | :------------------------------------------------ | -| ``airflow (dags|tasks|scheduler) [-sd, --subdir]`` | ``airflow (dags|tasks|scheduler) [-S, --subdir]`` | -| ``airflow tasks test [-dr, --dry_run]`` | ``airflow tasks test [-n, --dry-run]`` | -| ``airflow dags backfill [-dr, --dry_run]`` | ``airflow dags backfill [-n, --dry-run]`` | -| ``airflow tasks clear [-dx, --dag_regex]`` | ``airflow tasks clear [-R, --dag-regex]`` | -| ``airflow kerberos [-kt, --keytab]`` | ``airflow kerberos [-k, --keytab]`` | -| ``airflow tasks run [-int, --interactive]`` | ``airflow tasks run [-N, --interactive]`` | -| ``airflow webserver [-hn, --hostname]`` | ``airflow webserver [-H, --hostname]`` | -| ``airflow celery worker [-cn, --celery_hostname]`` | ``airflow celery worker [-H, --celery-hostname]`` | -| ``airflow celery flower [-hn, --hostname]`` | ``airflow celery flower [-H, --hostname]`` | -| ``airflow celery flower [-fc, --flower_conf]`` | ``airflow celery flower [-c, --flower-conf]`` | -| ``airflow celery flower [-ba, --basic_auth]`` | ``airflow celery flower [-A, --basic-auth]`` | -| ``airflow celery flower [-tp, --task_params]`` | ``airflow celery flower [-t, --task-params]`` | -| ``airflow celery flower [-pm, 
--post_mortem]`` | ``airflow celery flower [-m, --post-mortem]`` | +All context variables can still be provided with a double-asterisk argument: -For Airflow long option, use [kebab-case](https://en.wikipedia.org/wiki/Letter_case) instead of [snake_case](https://en.wikipedia.org/wiki/Snake_case) +```python +def myfunc(**context): + print(context) # all variables will be provided to context -| Old option | New option | -| :--------------------------------- | :--------------------------------- | -| ``--task_regex`` | ``--task-regex`` | -| ``--start_date`` | ``--start-date`` | -| ``--end_date`` | ``--end-date`` | -| ``--dry_run`` | ``--dry-run`` | -| ``--no_backfill`` | ``--no-backfill`` | -| ``--mark_success`` | ``--mark-success`` | -| ``--donot_pickle`` | ``--donot-pickle`` | -| ``--ignore_dependencies`` | ``--ignore-dependencies`` | -| ``--ignore_first_depends_on_past`` | ``--ignore-first-depends-on-past`` | -| ``--delay_on_limit`` | ``--delay-on-limit`` | -| ``--reset_dagruns`` | ``--reset-dagruns`` | -| ``--rerun_failed_tasks`` | ``--rerun-failed-tasks`` | -| ``--run_backwards`` | ``--run-backwards`` | -| ``--only_failed`` | ``--only-failed`` | -| ``--only_running`` | ``--only-running`` | -| ``--exclude_subdags`` | ``--exclude-subdags`` | -| ``--exclude_parentdag`` | ``--exclude-parentdag`` | -| ``--dag_regex`` | ``--dag-regex`` | -| ``--run_id`` | ``--run-id`` | -| ``--exec_date`` | ``--exec-date`` | -| ``--ignore_all_dependencies`` | ``--ignore-all-dependencies`` | -| ``--ignore_depends_on_past`` | ``--ignore-depends-on-past`` | -| ``--ship_dag`` | ``--ship-dag`` | -| ``--job_id`` | ``--job-id`` | -| ``--cfg_path`` | ``--cfg-path`` | -| ``--ssl_cert`` | ``--ssl-cert`` | -| ``--ssl_key`` | ``--ssl-key`` | -| ``--worker_timeout`` | ``--worker-timeout`` | -| ``--access_logfile`` | ``--access-logfile`` | -| ``--error_logfile`` | ``--error-logfile`` | -| ``--dag_id`` | ``--dag-id`` | -| ``--num_runs`` | ``--num-runs`` | -| ``--do_pickle`` | ``--do-pickle`` | -| 
``--celery_hostname`` | ``--celery-hostname`` | -| ``--broker_api`` | ``--broker-api`` | -| ``--flower_conf`` | ``--flower-conf`` | -| ``--url_prefix`` | ``--url-prefix`` | -| ``--basic_auth`` | ``--basic-auth`` | -| ``--task_params`` | ``--task-params`` | -| ``--post_mortem`` | ``--post-mortem`` | -| ``--conn_uri`` | ``--conn-uri`` | -| ``--conn_type`` | ``--conn-type`` | -| ``--conn_host`` | ``--conn-host`` | -| ``--conn_login`` | ``--conn-login`` | -| ``--conn_password`` | ``--conn-password`` | -| ``--conn_schema`` | ``--conn-schema`` | -| ``--conn_port`` | ``--conn-port`` | -| ``--conn_extra`` | ``--conn-extra`` | -| ``--use_random_password`` | ``--use-random-password`` | -| ``--skip_serve_logs`` | ``--skip-serve-logs`` | +python_operator = PythonOperator(task_id='mytask', python_callable=myfunc) +``` -### Remove serve_logs command from CLI +The task context variable names are reserved names in the callable function, hence a clash with `op_args` and `op_kwargs` results in an exception: -The ``serve_logs`` command has been deleted. This command should be run only by internal application mechanisms -and there is no need for it to be accessible from the CLI interface. +```python +def myfunc(dag): + # raises a ValueError because "dag" is a reserved name + # valid signature example: myfunc(mydag) -### dag_state CLI command +python_operator = PythonOperator( + task_id='mytask', + op_args=[1], + python_callable=myfunc, +) +``` -If the DAGRun was triggered with conf key/values passed in, they will also be printed in the dag_state CLI response -ie. running, {"name": "bob"} -whereas in in prior releases it just printed the state: -ie. running +The change is backwards compatible, setting `provide_context` will add the `provide_context` variable to the `kwargs` (but won't do anything). 
-### Remove gcp_service_account_keys option in airflow.cfg file
+PR: [#5990](https://github.com/apache/airflow/pull/5990)

-This option has been removed because it is no longer supported by the Google Kubernetes Engine. The new
-recommended service account keys for the Google Cloud Platform management method is
-[Workload Identity](https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity).
+#### `airflow.sensors.filesystem.FileSensor`

-### BranchPythonOperator has a return value
-`BranchPythonOperator` will now return a value equal to the `task_id` of the chosen branch,
-where previously it returned None. Since it inherits from BaseOperator it will do an
-`xcom_push` of this value if `do_xcom_push=True`. This is useful for downstream decision-making.
+FileSensor now takes a glob pattern, not just a filename. If the filename you are looking for has `*`, `?`, or `[` in it then you should replace these with `[*]`, `[?]`, and `[[]`.

-### Removal of airflow.AirflowMacroPlugin class
+#### `airflow.operators.subdag_operator.SubDagOperator`

-The class was there in airflow package but it has not been used (apparently since 2015).
-It has been removed.
+`SubDagOperator` is changed to use Airflow scheduler instead of backfill
+to schedule tasks in the subdag. Users no longer need to specify the executor
+in `SubDagOperator`.

-### Changes to settings
-CONTEXT_MANAGER_DAG was removed from settings. It's role has been taken by `DagContext` in
-'airflow.models.dag'. One of the reasons was that settings should be rather static than store
-dynamic context from the DAG, but the main one is that moving the context out of settings allowed to
-untangle cyclic imports between DAG, BaseOperator, SerializedDAG, SerializedBaseOperator which was
-part of AIRFLOW-6010.
+#### `airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator` +#### `airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator` +#### `airflow.providers.cncf.kubernetes.operators.kubernetes_pod.KubernetesPodOperator` +#### `airflow.providers.ssh.operators.ssh.SSHOperator` +#### `airflow.providers.microsoft.winrm.operators.winrm.WinRMOperator` +#### `airflow.operators.bash.BashOperator` +#### `airflow.providers.docker.operators.docker.DockerOperator` +#### `airflow.providers.http.operators.http.SimpleHttpOperator` +#### `airflow.providers.http.operators.http.SimpleHttpOperator` -#### Change default aws_conn_id in EMR operators +The `do_xcom_push` flag (a switch to push the result of an operator to xcom or not) was appearing in different incarnations in different operators. It's function has been unified under a common name (`do_xcom_push`) on `BaseOperator`. This way it is also easy to globally disable pushing results to xcom. -The default value for the [aws_conn_id](https://airflow.apache.org/howto/manage-connections.html#amazon-web-services) was accidently set to 's3_default' instead of 'aws_default' in some of the emr operators in previous -versions. This was leading to EmrStepSensor not being able to find their corresponding emr cluster. With the new -changes in the EmrAddStepsOperator, EmrTerminateJobFlowOperator and EmrCreateJobFlowOperator this issue is -solved. 
+The following operators were affected: -### Removal of redirect_stdout, redirect_stderr +* DatastoreExportOperator (Backwards compatible) +* DatastoreImportOperator (Backwards compatible) +* KubernetesPodOperator (Not backwards compatible) +* SSHOperator (Not backwards compatible) +* WinRMOperator (Not backwards compatible) +* BashOperator (Not backwards compatible) +* DockerOperator (Not backwards compatible) +* SimpleHttpOperator (Not backwards compatible) -Function `redirect_stderr` and `redirect_stdout` from `airflow.utils.log.logging_mixin` module has -been deleted because it can be easily replaced by the standard library. -The functions of the standard library are more flexible and can be used in larger cases. +See [AIRFLOW-3249](https://jira.apache.org/jira/browse/AIRFLOW-3249) for details -The code below -```python -import logging +#### `airflow.operators.latest_only_operator.LatestOnlyOperator` -from airflow.utils.log.logging_mixin import redirect_stderr, redirect_stdout +In previous versions, the `LatestOnlyOperator` forcefully skipped all (direct and undirect) downstream tasks on its own. From this version on the operator will **only skip direct downstream** tasks and the scheduler will handle skipping any further downstream dependencies. -logger = logging.getLogger("custom-logger") -with redirect_stdout(logger, logging.INFO), redirect_stderr(logger, logging.WARN): - print("I love Airflow") +No change is needed if only the default trigger rule `all_success` is being used. + +If the DAG relies on tasks with other trigger rules (i.e. `all_done`) being skipped by the `LatestOnlyOperator`, adjustments to the DAG need to be made to commodate the change in behaviour, i.e. with additional edges from the `LatestOnlyOperator`. 
+
+The goal of this change is to achieve a more consistent and configurable cascading behaviour based on the `BaseBranchOperator` (see [AIRFLOW-2923](https://jira.apache.org/jira/browse/AIRFLOW-2923) and [AIRFLOW-1784](https://jira.apache.org/jira/browse/AIRFLOW-1784)).
+
+#### `airflow.sensors.time_sensor.TimeSensor`
+
+Previously `TimeSensor` always compared the `target_time` with the current time in UTC.
+
+Now it will compare `target_time` with the current time in the timezone of the DAG,
+defaulting to the `default_timezone` in the global config.
+
+### Changes to the core Python API
+
+We strive to ensure that there are no changes that may affect the end user, and your Python files, but this
+release may contain changes that will require changes to your plugins, DAG File or other integration.
+
+Only changes unique to this provider are described here. You should still pay attention to the changes that
+have been made to the core (including core operators) as they can affect the integration behavior
+of this provider.
+
+This section describes the changes that have been made, and what you need to do to update your Python files.
+
+#### Removed sub-package imports from `airflow/__init__.py`
+
+The imports `LoggingMixin`, `conf`, and `AirflowException` have been removed from `airflow/__init__.py`.
+All implicit references of these objects will no longer be valid. To migrate, all usages of each old path must be
+replaced with its corresponding new path.
+ +| Old Path (Implicit Import) | New Path (Explicit Import) | +|------------------------------|--------------------------------------------------| +| ``airflow.LoggingMixin`` | ``airflow.utils.log.logging_mixin.LoggingMixin`` | +| ``airflow.conf`` | ``airflow.configuration.conf`` | +| ``airflow.AirflowException`` | ``airflow.exceptions.AirflowException`` | +#### Variables removed from the task instance context + +The following variables were removed from the task instance context: +- end_date +- latest_date +- tables + + +#### `airflow.contrib.utils.Weekday` + +Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib +package was supported by the community. The project was passed to the Apache community and currently the +entire code is maintained by the community, so now the division has no justification, and it is only due +to historical reasons. + +To clean up, `Weekday` enum has been moved from `airflow.contrib.utils` into `airflow.utils` module. + +#### `airflow.models.connection.Connection` + +The connection module has new deprecated methods: + +- `Connection.parse_from_uri` +- `Connection.log_info` +- `Connection.debug_info` + +and one deprecated function: +- `parse_netloc_to_hostname` + +Previously, users could create a connection object in two ways ``` -can be replaced by the following code: -```python -from contextlib import redirect_stdout, redirect_stderr -import logging +conn_1 = Connection(conn_id="conn_a", uri="mysql://AAA/") +# or +conn_2 = Connection(conn_id="conn_a") +conn_2.parse_uri(uri="mysql://AAA/") +``` +Now the second way is not supported. -from airflow.utils.log.logging_mixin import StreamLogWriter +`Connection.log_info` and `Connection.debug_info` method have been deprecated. Read each Connection field individually or use the +default representation (`__repr__`). -logger = logging.getLogger("custom-logger") +The old method is still works but can be abandoned at any time. 
The changes are intended to delete method +that are rarely used. -with redirect_stdout(StreamLogWriter(logger, logging.INFO)), \ - redirect_stderr(StreamLogWriter(logger, logging.WARN)): - print("I Love Airflow") +#### `airflow.models.dag.DAG.create_dagrun` +DAG.create_dagrun accepts run_type and does not require run_id +This change is caused by adding `run_type` column to `DagRun`. + +Previous signature: +```python +def create_dagrun(self, + run_id, + state, + execution_date=None, + start_date=None, + external_trigger=False, + conf=None, + session=None): ``` +current: +```python +def create_dagrun(self, + state, + execution_date=None, + run_id=None, + start_date=None, + external_trigger=False, + conf=None, + run_type=None, + session=None): +``` +If user provides `run_id` then the `run_type` will be derived from it by checking prefix, allowed types +: `manual`, `scheduled`, `backfill` (defined by `airflow.utils.types.DagRunType`). + +If user provides `run_type` and `execution_date` then `run_id` is constructed as +`{run_type}__{execution_data.isoformat()}`. -### Changes to SQLSensor +Airflow should construct dagruns using `run_type` and `execution_date`, creation using +`run_id` is preserved for user actions. -SQLSensor now consistent with python `bool()` function and the `allow_null` parameter has been removed. -It will resolve after receiving any value that is casted to `True` with python `bool(value)`. That -changes the previous response receiving `NULL` or `'0'`. Earlier `'0'` has been treated as success -criteria. `NULL` has been treated depending on value of `allow_null`parameter. But all the previous -behaviour is still achievable setting param `success` to `lambda x: x is None or str(x) not in ('0', '')`. +#### `airflow.models.dagrun.DagRun` -### Idempotency in BigQuery operators -Idempotency was added to `BigQueryCreateEmptyTableOperator` and `BigQueryCreateEmptyDatasetOperator`. 
-But to achieve that try / except clause was removed from `create_empty_dataset` and `create_empty_table` -methods of `BigQueryHook`. +Use DagRunType.SCHEDULED.value instead of DagRun.ID_PREFIX -### Migration of AWS components +All the run_id prefixes for different kind of DagRuns have been grouped into a single +enum in `airflow.utils.types.DagRunType`. -All AWS components (hooks, operators, sensors, example DAGs) will be grouped together as decided in -[AIP-21](https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-21%3A+Changes+in+import+paths). Migrated -components remain backwards compatible but raise a `DeprecationWarning` when imported from the old module. -Migrated are: +Previously, there were defined in various places, example as `ID_PREFIX` class variables for +`DagRun`, `BackfillJob` and in `_trigger_dag` function. -| Old path | New path | -| ------------------------------------------------------------ | -------------------------------------------------------- | -| airflow.hooks.S3_hook.S3Hook | airflow.providers.amazon.aws.hooks.s3.S3Hook | -| airflow.contrib.hooks.aws_athena_hook.AWSAthenaHook | airflow.providers.amazon.aws.hooks.athena.AWSAthenaHook | -| airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook | airflow.providers.amazon.aws.hooks.lambda_function.AwsLambdaHook | -| airflow.contrib.hooks.aws_sqs_hook.SQSHook | airflow.providers.amazon.aws.hooks.sqs.SQSHook | -| airflow.contrib.hooks.aws_sns_hook.AwsSnsHook | airflow.providers.amazon.aws.hooks.sns.AwsSnsHook | -| airflow.contrib.operators.aws_athena_operator.AWSAthenaOperator | airflow.providers.amazon.aws.operators.athena.AWSAthenaOperator | -| airflow.contrib.operators.awsbatch.AWSBatchOperator | airflow.providers.amazon.aws.operators.batch.AwsBatchOperator | -| airflow.contrib.operators.awsbatch.BatchProtocol | airflow.providers.amazon.aws.hooks.batch_client.AwsBatchProtocol | -| private attrs and methods on AWSBatchOperator | airflow.providers.amazon.aws.hooks.batch_client.AwsBatchClient 
| -| n/a | airflow.providers.amazon.aws.hooks.batch_waiters.AwsBatchWaiters | -| airflow.contrib.operators.aws_sqs_publish_operator.SQSPublishOperator | airflow.providers.amazon.aws.operators.sqs.SQSPublishOperator | -| airflow.contrib.operators.aws_sns_publish_operator.SnsPublishOperator | airflow.providers.amazon.aws.operators.sns.SnsPublishOperator | -| airflow.contrib.sensors.aws_athena_sensor.AthenaSensor | airflow.providers.amazon.aws.sensors.athena.AthenaSensor | -| airflow.contrib.sensors.aws_sqs_sensor.SQSSensor | airflow.providers.amazon.aws.sensors.sqs.SQSSensor | +Was: -### AWS Batch Operator +```python +>> from airflow.models.dagrun import DagRun +>> DagRun.ID_PREFIX +scheduled__ +``` -The `AwsBatchOperator` was refactored to extract an `AwsBatchClient` (and inherit from it). The -changes are mostly backwards compatible and clarify the public API for these classes; some -private methods on `AwsBatchOperator` for polling a job status were relocated and renamed -to surface new public methods on `AwsBatchClient` (and via inheritance on `AwsBatchOperator`). A -couple of job attributes are renamed on an instance of `AwsBatchOperator`; these were mostly -used like private attributes but they were surfaced in the public API, so any use of them needs -to be updated as follows: -- `AwsBatchOperator().jobId` -> `AwsBatchOperator().job_id` -- `AwsBatchOperator().jobName` -> `AwsBatchOperator().job_name` +Replaced by: -The `AwsBatchOperator` gets a new option to define a custom model for waiting on job status changes. -The `AwsBatchOperator` can use a new `waiters` parameter, an instance of `AwsBatchWaiters`, to -specify that custom job waiters will be used to monitor a batch job. See the latest API -documentation for details. 
+```python +>> from airflow.utils.types import DagRunType +>> DagRunType.SCHEDULED.value +scheduled +``` + + +#### `airflow.utils.file.TemporaryDirectory` + +We remove airflow.utils.file.TemporaryDirectory +Since Airflow dropped support for Python < 3.5 there's no need to have this custom +implementation of `TemporaryDirectory` because the same functionality is provided by +`tempfile.TemporaryDirectory`. + +Now users instead of `import from airflow.utils.files import TemporaryDirectory` should +do `from tempfile import TemporaryDirectory`. Both context managers provide the same +interface, thus no additional changes should be required. -### Additional arguments passed to BaseOperator cause an exception +#### `airflow.AirflowMacroPlugin` -Previous versions of Airflow took additional arguments and displayed a message on the console. When the -message was not noticed by users, it caused very difficult to detect errors. +We removed `airflow.AirflowMacroPlugin` class. The class was there in airflow package but it has not been used (apparently since 2015). +It has been removed. -In order to restore the previous behavior, you must set an ``True`` in the ``allow_illegal_arguments`` -option of section ``[operators]`` in the ``airflow.cfg`` file. In the future it is possible to completely -delete this option. +#### `airflow.settings.CONTEXT_MANAGER_DAG` -### Simplification of the TriggerDagRunOperator +CONTEXT_MANAGER_DAG was removed from settings. It's role has been taken by `DagContext` in +'airflow.models.dag'. One of the reasons was that settings should be rather static than store +dynamic context from the DAG, but the main one is that moving the context out of settings allowed to +untangle cyclic imports between DAG, BaseOperator, SerializedDAG, SerializedBaseOperator which was +part of AIRFLOW-6010. -The TriggerDagRunOperator now takes a `conf` argument to which a dict can be provided as conf for the DagRun. -As a result, the `python_callable` argument was removed. 
PR: https://github.com/apache/airflow/pull/6317. +#### `airflow.utils.log.logging_mixin.redirect_stderr` +#### `airflow.utils.log.logging_mixin.redirect_stdout` -### Changes in Google Cloud Platform related hooks +Function `redirect_stderr` and `redirect_stdout` from `airflow.utils.log.logging_mixin` module has +been deleted because it can be easily replaced by the standard library. +The functions of the standard library are more flexible and can be used in larger cases. -The change in GCP operators implies that GCP Hooks for those operators require now keyword parameters rather -than positional ones in all methods where `project_id` is used. The methods throw an explanatory exception -in case they are called using positional parameters. +The code below +```python +import logging -Hooks involved: +from airflow.utils.log.logging_mixin import redirect_stderr, redirect_stdout - * DataflowHook - * MLEngineHook - * PubSubHook +logger = logging.getLogger("custom-logger") +with redirect_stdout(logger, logging.INFO), redirect_stderr(logger, logging.WARN): + print("I love Airflow") +``` +can be replaced by the following code: +```python +from contextlib import redirect_stdout, redirect_stderr +import logging -Other GCP hooks are unaffected. +from airflow.utils.log.logging_mixin import StreamLogWriter -### Fernet is enabled by default +logger = logging.getLogger("custom-logger") -The fernet mechanism is enabled by default to increase the security of the default installation. In order to -restore the previous behavior, the user must consciously set an empty key in the ``fernet_key`` option of -section ``[core]`` in the ``airflow.cfg`` file. +with redirect_stdout(StreamLogWriter(logger, logging.INFO)), \ + redirect_stderr(StreamLogWriter(logger, logging.WARN)): + print("I Love Airflow") +``` -At the same time, this means that the `apache-airflow[crypto]` extra-packages are always installed. -However, this requires that your operating system has ``libffi-dev`` installed. 
+#### `airflow.models.baseoperator.BaseOperator`
-### Changes to Google PubSub Operators, Hook and Sensor
-In the `PubSubPublishOperator` and `PubSubHook.publsh` method the data field in a message should be bytestring (utf-8 encoded) rather than base64 encoded string.
+Now, additional arguments passed to BaseOperator cause an exception. Previous versions of Airflow took additional arguments and displayed a message on the console. When the
+message was not noticed by users, it caused errors that were very difficult to detect.
-Due to the normalization of the parameters within GCP operators and hooks a parameters like `project` or `topic_project`
-are deprecated and will be substituted by parameter `project_id`.
-In `PubSubHook.create_subscription` hook method in the parameter `subscription_project` is replaced by `subscription_project_id`.
-Template fields are updated accordingly and old ones may not work.
+In order to restore the previous behavior, you must set ``True`` in the ``allow_illegal_arguments``
+option of section ``[operators]`` in the ``airflow.cfg`` file. In the future it is possible to completely
+delete this option.
-It is required now to pass key-word only arguments to `PubSub` hook.
+#### `airflow.models.dagbag.DagBag`
-These changes are not backward compatible.
+Passing `store_serialized_dags` argument to DagBag.__init__ and accessing `DagBag.store_serialized_dags` property
+are deprecated and will be removed in future versions.
-Affected components: - * airflow.providers.google.cloud.hooks.pubsub.PubSubHook - * airflow.providers.google.cloud.operators.pubsub.PubSubTopicCreateOperator - * airflow.providers.google.cloud.operators.pubsub.PubSubSubscriptionCreateOperator - * airflow.providers.google.cloud.operators.pubsub.PubSubTopicDeleteOperator - * airflow.providers.google.cloud.operators.pubsub.PubSubSubscriptionDeleteOperator - * airflow.providers.google.cloud.operators.pubsub.PubSubPublishOperator - * airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor -### Removed Hipchat integration +**Previous signature**: -Hipchat has reached end of life and is no longer available. +```python +DagBag( + dag_folder=None, + include_examples=conf.getboolean('core', 'LOAD_EXAMPLES'), + safe_mode=conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'), + store_serialized_dags=False +): +``` -For more information please see -https://community.atlassian.com/t5/Stride-articles/Stride-and-Hipchat-Cloud-have-reached-End-of-Life-updated/ba-p/940248 +**current**: +```python +DagBag( + dag_folder=None, + include_examples=conf.getboolean('core', 'LOAD_EXAMPLES'), + safe_mode=conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'), + read_dags_from_db=False +): +``` + +If you were using positional arguments, it requires no change but if you were using keyword +arguments, please change `store_serialized_dags` to `read_dags_from_db`. -### The gcp_conn_id parameter in GKEPodOperator is required +Similarly, if you were using `DagBag().store_serialized_dags` property, change it to +`DagBag().read_dags_from_db`. -In previous versions, it was possible to pass the `None` value to the `gcp_conn_id` in the GKEPodOperator -operator, which resulted in credentials being determined according to the -[Application Default Credentials](https://cloud.google.com/docs/authentication/production) strategy. +### Changes in `google` provider package -Now this parameter requires a value. 
To restore the previous behavior, configure the connection without -specifying the service account. +We strive to ensure that there are no changes that may affect the end user and your Python files, but this +release may contain changes that will require changes to your configuration, DAG Files or other integration +e.g. custom operators. -Detailed information about connection management is available: -[Google Cloud Platform Connection](https://airflow.apache.org/howto/connection/gcp.html). +Only changes unique to this provider are described here. You should still pay attention to the changes that +have been made to the core (including core operators) as they can affect the integration behavior +of this provider. + +This section describes the changes that have been made, and what you need to do to update your if +you use operators or hooks which integrate with Google services (including Google Cloud Platform - GCP). -### Normalize gcp_conn_id for Google Cloud Platform +#### Normalize gcp_conn_id for Google Cloud Platform Previously not all hooks and operators related to Google Cloud Platform use `gcp_conn_id` as parameter for GCP connection. There is currently one parameter @@ -916,24 +1039,7 @@ Following components were affected by normalization: * airflow.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator * airflow.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator -### Changes to propagating Kubernetes worker annotations - -`kubernetes_annotations` configuration section has been removed. -A new key `worker_annotations` has been added to existing `kubernetes` section instead. -That is to remove restriction on the character set for k8s annotation keys. -All key/value pairs from `kubernetes_annotations` should now go to `worker_annotations` as a json. I.e. instead of e.g. 
-``` -[kubernetes_annotations] -annotation_key = annotation_value -annotation_key2 = annotation_value2 -``` -it should be rewritten to -``` -[kubernetes] -worker_annotations = { "annotation_key" : "annotation_value", "annotation_key2" : "annotation_value2" } -``` - -### Changes to import paths and names of GCP operators and hooks +#### Changes to import paths and names of GCP operators and hooks According to [AIP-21](https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-21%3A+Changes+in+import+paths) operators related to Google Cloud Platform has been moved from contrib to core. @@ -1119,141 +1225,369 @@ The following table shows changes in import paths. |airflow.contrib.sensors.gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor |airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor | |airflow.contrib.sensors.pubsub_sensor.PubSubPullSensor |airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor | +#### Unify default conn_id for Google Cloud Platform + +Previously not all hooks and operators related to Google Cloud Platform use +``google_cloud_default`` as a default conn_id. There is currently one default +variant. Values like ``google_cloud_storage_default``, ``bigquery_default``, +``google_cloud_datastore_default`` have been deprecated. The configuration of +existing relevant connections in the database have been preserved. To use those +deprecated GCP conn_id, you need to explicitly pass their conn_id into +operators/hooks. Otherwise, ``google_cloud_default`` will be used as GCP's conn_id +by default. 
+
+#### `airflow.providers.google.cloud.hooks.dataflow.DataflowHook`
+#### `airflow.providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator`
+#### `airflow.providers.google.cloud.operators.dataflow.DataflowTemplatedJobStartOperator`
+#### `airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator`
+
+To use project_id argument consistently across GCP hooks and operators, we did the following changes:
+
+- Changed order of arguments in DataflowHook.start_python_dataflow. Uses
+  with positional arguments may break.
+- Changed order of arguments in DataflowHook.is_job_dataflow_running. Uses
+  with positional arguments may break.
+- Changed order of arguments in DataflowHook.cancel_job. Uses
+  with positional arguments may break.
+- Added optional project_id argument to DataflowCreateJavaJobOperator
+  constructor.
+- Added optional project_id argument to DataflowTemplatedJobStartOperator
+  constructor.
+- Added optional project_id argument to DataflowCreatePythonJobOperator
+  constructor.
+
+#### `airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor`
+
+To provide more precise control in handling of changes to objects in
+underlying GCS Bucket the constructor of this sensor now has changed.
+
+- Old Behavior: This constructor used to optionally take ``previous_num_objects: int``.
+- New replacement constructor kwarg: ``previous_objects: Optional[Set[str]]``.
+
+Most users would not specify this argument because the bucket begins empty
+and the user wants to treat any files as new.
+
+Example of Updating usage of this sensor:
+Users who used to call:
+
+``GCSUploadSessionCompleteSensor(bucket='my_bucket', prefix='my_prefix', previous_num_objects=1)``
+
+Will now call:
+
+``GCSUploadSessionCompleteSensor(bucket='my_bucket', prefix='my_prefix', previous_objects={'.keep'})``
+
+Where '.keep' is a single file at your prefix that the sensor should not consider new.
+ +#### `airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor` +#### `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook` + +To simplify BigQuery operators (no need of `Cursor`) and standardize usage of hooks within all GCP integration methods from `BiqQueryBaseCursor` +were moved to `BigQueryHook`. Using them by from `Cursor` object is still possible due to preserved backward compatibility but they will raise `DeprecationWarning`. +The following methods were moved: + +| Old path | New path | +|------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------| +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.cancel_query | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.cancel_query | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_dataset | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_table | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_table | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_external_table | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_external_table | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.delete_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.delete_dataset | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset_tables | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset_tables_list | 
airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables_list | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_datasets_list | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_datasets_list | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_schema | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_schema | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_tabledata | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_tabledata | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.insert_all | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_all | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.patch_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_dataset | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.patch_table | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_table | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.poll_job_complete | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.poll_job_complete | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_copy | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_copy | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_extract | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_extract | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_grant_dataset_view_access | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_grant_dataset_view_access | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_load | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_load | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_query | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_query | +| 
airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_delete | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_delete | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_upsert | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_upsert | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_with_configuration | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_with_configuration | +| airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.update_dataset | airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.update_dataset | + +#### `airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor` + +Since BigQuery is the part of the GCP it was possible to simplify the code by handling the exceptions +by usage of the `airflow.providers.google.common.hooks.base.GoogleBaseHook.catch_http_exception` decorator however it changes +exceptions raised by the following methods: +* `airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_delete` raises `AirflowException` instead of `Exception`. +* `airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_dataset` raises `AirflowException` instead of `ValueError`. +* `airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset` raises `AirflowException` instead of `ValueError`. + +#### `airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator` +#### `airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator` + +Idempotency was added to `BigQueryCreateEmptyTableOperator` and `BigQueryCreateEmptyDatasetOperator`. +But to achieve that try / except clause was removed from `create_empty_dataset` and `create_empty_table` +methods of `BigQueryHook`. 
+
+#### `airflow.providers.google.cloud.hooks.dataflow.DataflowHook`
+#### `airflow.providers.google.cloud.hooks.mlengine.MLEngineHook`
+#### `airflow.providers.google.cloud.hooks.pubsub.PubSubHook`
+
+The change in GCP operators implies that GCP Hooks for those operators require now keyword parameters rather
+than positional ones in all methods where `project_id` is used. The methods throw an explanatory exception
+in case they are called using positional parameters.
+
+Other GCP hooks are unaffected.
+
+#### `airflow.providers.google.cloud.hooks.pubsub.PubSubHook`
+#### `airflow.providers.google.cloud.operators.pubsub.PubSubTopicCreateOperator`
+#### `airflow.providers.google.cloud.operators.pubsub.PubSubSubscriptionCreateOperator`
+#### `airflow.providers.google.cloud.operators.pubsub.PubSubTopicDeleteOperator`
+#### `airflow.providers.google.cloud.operators.pubsub.PubSubSubscriptionDeleteOperator`
+#### `airflow.providers.google.cloud.operators.pubsub.PubSubPublishOperator`
+#### `airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor`
+
+In the `PubSubPublishOperator` and `PubSubHook.publish` method the data field in a message should be bytestring (utf-8 encoded) rather than base64 encoded string.
+
+Due to the normalization of the parameters within GCP operators and hooks, parameters like `project` or `topic_project`
+are deprecated and will be substituted by parameter `project_id`.
+In `PubSubHook.create_subscription` hook method the parameter `subscription_project` is replaced by `subscription_project_id`.
+Template fields are updated accordingly and old ones may not work.
+
+It is required now to pass keyword-only arguments to `PubSub` hook.
+
+These changes are not backward compatible.
+
+#### `airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator`
+
+The gcp_conn_id parameter in GKEStartPodOperator is required. In previous versions, it was possible to pass
+the `None` value to the `gcp_conn_id` in the GKEStartPodOperator
+operator, which resulted in credentials being determined according to the
+[Application Default Credentials](https://cloud.google.com/docs/authentication/production) strategy.
+
+Now this parameter requires a value. To restore the previous behavior, configure the connection without
+specifying the service account.
+
+Detailed information about connection management is available:
+[Google Cloud Platform Connection](https://airflow.apache.org/howto/connection/gcp.html).
+
+
+#### `airflow.providers.google.cloud.hooks.gcs.GCSHook`
+
+* The following parameters have been replaced in all the methods in GCSHook:
+  * `bucket` is changed to `bucket_name`
+  * `object` is changed to `object_name`
+
+* The `maxResults` parameter in `GoogleCloudStorageHook.list` has been renamed to `max_results` for consistency.
+
+#### `airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPigJobOperator`
+#### `airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHiveJobOperator`
+#### `airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator`
+#### `airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkJobOperator`
+#### `airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator`
+#### `airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPySparkJobOperator`
+
+The 'properties' and 'jars' properties for the Dataproc related operators (`DataprocXXXOperator`) have been renamed from
+`dataproc_xxxx_properties` and `dataproc_xxx_jars` to `dataproc_properties`
+and `dataproc_jars` respectively.
+Arguments for dataproc_properties dataproc_jars + +#### `airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCreateJobOperator` + +To obtain pylint compatibility the `filter ` argument in `CloudDataTransferServiceCreateJobOperator` +has been renamed to `request_filter`. + +#### `airflow.providers.google.cloud.hooks.cloud_storage_transfer_service.CloudDataTransferServiceHook` + + To obtain pylint compatibility the `filter` argument in `CloudDataTransferServiceHook.list_transfer_job` and + `CloudDataTransferServiceHook.list_transfer_operations` has been renamed to `request_filter`. + +#### `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook` + +In general all hook methods are decorated with `@GoogleBaseHook.fallback_to_default_project_id` thus +parameters to hook can only be passed via keyword arguments. + +- `create_empty_table` method accepts now `table_resource` parameter. If provided all +other parameters are ignored. +- `create_empty_dataset` will now use values from `dataset_reference` instead of raising error +if parameters were passed in `dataset_reference` and as arguments to method. Additionally validation +of `dataset_reference` is done using `Dataset.from_api_repr`. Exception and log messages has been +changed. +- `update_dataset` requires now new `fields` argument (breaking change) +- `delete_dataset` has new signature (dataset_id, project_id, ...) +previous one was (project_id, dataset_id, ...) (breaking change) +- `get_tabledata` returns list of rows instead of API response in dict format. This method is deprecated in + favor of `list_rows`. 
(breaking change) + +#### `airflow.providers.google.cloud.hooks.dataflow.DataflowHook.start_python_dataflow` +#### `airflow.providers.google.cloud.hooks.dataflow.DataflowHook.start_python_dataflow` +#### `airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator` -### Remove provide_context +Change python3 as Dataflow Hooks/Operators default interpreter -`provide_context` argument on the PythonOperator was removed. The signature of the callable passed to the PythonOperator is now inferred and argument values are always automatically provided. There is no need to explicitly provide or not provide the context anymore. For example: +Now the `py_interpreter` argument for DataFlow Hooks/Operators has been changed from python2 to python3. -```python -def myfunc(execution_date): - print(execution_date) +#### `airflow.providers.google.common.hooks.base_google.GoogleBaseHook` -python_operator = PythonOperator(task_id='mytask', python_callable=myfunc, dag=dag) -``` +To simplify the code, the decorator provide_gcp_credential_file has been moved from the inner-class. -Notice you don't have to set provide_context=True, variables from the task context are now automatically detected and provided. +Instead of `@GoogleBaseHook._Decorators.provide_gcp_credential_file`, +you should write `@GoogleBaseHook.provide_gcp_credential_file` -All context variables can still be provided with a double-asterisk argument: +#### `airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator` -```python -def myfunc(**context): - print(context) # all variables will be provided to context +It is highly recommended to have 1TB+ disk size for Dataproc to have sufficient throughput: +https://cloud.google.com/compute/docs/disks/performance -python_operator = PythonOperator(task_id='mytask', python_callable=myfunc) -``` +Hence, the default value for `master_disk_size` in DataprocCreateClusterOperator has beeen changes from 500GB to 1TB. 
-The task context variable names are reserved names in the callable function, hence a clash with `op_args` and `op_kwargs` results in an exception:
+#### `airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetTablesOperator`
-```python
-def myfunc(dag):
-    # raises a ValueError because "dag" is a reserved name
-    # valid signature example: myfunc(mydag)
+We changed the signature of BigQueryGetDatasetTablesOperator.
-python_operator = PythonOperator(
-    task_id='mytask',
-    op_args=[1],
-    python_callable=myfunc,
-)
+Before:
+```python
+BigQueryGetDatasetTablesOperator(dataset_id: str, dataset_resource: dict, ...)
+```
-The change is backwards compatible, setting `provide_context` will add the `provide_context` variable to the `kwargs` (but won't do anything).
-
-PR: [#5990](https://github.com/apache/airflow/pull/5990)
-
-### Changes to FileSensor
+After:
+```python
+BigQueryGetDatasetTablesOperator(dataset_resource: dict, dataset_id: Optional[str] = None, ...)
+```
-FileSensor is now takes a glob pattern, not just a filename. If the filename you are looking for has `*`, `?`, or `[` in it then you should replace these with `[*]`, `[?]`, and `[[]`.
+### Changes in `amazon` provider package
-### Change dag loading duration metric name
-Change DAG file loading duration metric from
-`dag.loading-duration.` to `dag.loading-duration.`. This is to
-better handle the case when a DAG file has multiple DAGs.
+We strive to ensure that there are no changes that may affect the end user, and your Python files, but this
+release may contain changes that will require changes to your configuration, DAG Files or other integration
+e.g. custom operators.
-### Changes to ImapHook, ImapAttachmentSensor and ImapAttachmentToS3Operator
+Only changes unique to this provider are described here. You should still pay attention to the changes that
+have been made to the core (including core operators) as they can affect the integration behavior
+of this provider.
-ImapHook: -* The order of arguments has changed for `has_mail_attachment`, -`retrieve_mail_attachments` and `download_mail_attachments`. -* A new `mail_filter` argument has been added to each of those. +This section describes the changes that have been made, and what you need to do to update your if +you use operators or hooks which integrate with Amazon services (including Amazon Web Service - AWS). -ImapAttachmentSensor: -* The order of arguments has changed for `__init__`. -* A new `mail_filter` argument has been added to `__init__`. +#### Migration of AWS components -ImapAttachmentToS3Operator: -* The order of arguments has changed for `__init__`. -* A new `imap_mail_filter` argument has been added to `__init__`. +All AWS components (hooks, operators, sensors, example DAGs) will be grouped together as decided in +[AIP-21](https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-21%3A+Changes+in+import+paths). Migrated +components remain backwards compatible but raise a `DeprecationWarning` when imported from the old module. 
+Migrated are: -### Changes to `SubDagOperator` +| Old path | New path | +| ------------------------------------------------------------ | -------------------------------------------------------- | +| airflow.hooks.S3_hook.S3Hook | airflow.providers.amazon.aws.hooks.s3.S3Hook | +| airflow.contrib.hooks.aws_athena_hook.AWSAthenaHook | airflow.providers.amazon.aws.hooks.athena.AWSAthenaHook | +| airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook | airflow.providers.amazon.aws.hooks.lambda_function.AwsLambdaHook | +| airflow.contrib.hooks.aws_sqs_hook.SQSHook | airflow.providers.amazon.aws.hooks.sqs.SQSHook | +| airflow.contrib.hooks.aws_sns_hook.AwsSnsHook | airflow.providers.amazon.aws.hooks.sns.AwsSnsHook | +| airflow.contrib.operators.aws_athena_operator.AWSAthenaOperator | airflow.providers.amazon.aws.operators.athena.AWSAthenaOperator | +| airflow.contrib.operators.awsbatch.AWSBatchOperator | airflow.providers.amazon.aws.operators.batch.AwsBatchOperator | +| airflow.contrib.operators.awsbatch.BatchProtocol | airflow.providers.amazon.aws.hooks.batch_client.AwsBatchProtocol | +| private attrs and methods on AWSBatchOperator | airflow.providers.amazon.aws.hooks.batch_client.AwsBatchClient | +| n/a | airflow.providers.amazon.aws.hooks.batch_waiters.AwsBatchWaiters | +| airflow.contrib.operators.aws_sqs_publish_operator.SQSPublishOperator | airflow.providers.amazon.aws.operators.sqs.SQSPublishOperator | +| airflow.contrib.operators.aws_sns_publish_operator.SnsPublishOperator | airflow.providers.amazon.aws.operators.sns.SnsPublishOperator | +| airflow.contrib.sensors.aws_athena_sensor.AthenaSensor | airflow.providers.amazon.aws.sensors.athena.AthenaSensor | +| airflow.contrib.sensors.aws_sqs_sensor.SQSSensor | airflow.providers.amazon.aws.sensors.sqs.SQSSensor | -`SubDagOperator` is changed to use Airflow scheduler instead of backfill -to schedule tasks in the subdag. User no longer need to specify the executor -in `SubDagOperator`. 
+#### `airflow.providers.amazon.aws.hooks.emr.EmrHook` +#### `airflow.providers.amazon.aws.operators.emr_add_steps.EmrAddStepsOperator` +#### `airflow.providers.amazon.aws.operators.emr_create_job_flow.EmrCreateJobFlowOperator` +#### `airflow.providers.amazon.aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator` +The default value for the [aws_conn_id](https://airflow.apache.org/howto/manage-connections.html#amazon-web-services) was accidently set to 's3_default' instead of 'aws_default' in some of the emr operators in previous +versions. This was leading to EmrStepSensor not being able to find their corresponding emr cluster. With the new +changes in the EmrAddStepsOperator, EmrTerminateJobFlowOperator and EmrCreateJobFlowOperator this issue is +solved. -### Variables removed from the task instance context +#### `airflow.providers.amazon.aws.operators.batch.AwsBatchOperator` -The following variables were removed from the task instance context: -- end_date -- latest_date -- tables +The `AwsBatchOperator` was refactored to extract an `AwsBatchClient` (and inherit from it). The +changes are mostly backwards compatible and clarify the public API for these classes; some +private methods on `AwsBatchOperator` for polling a job status were relocated and renamed +to surface new public methods on `AwsBatchClient` (and via inheritance on `AwsBatchOperator`). A +couple of job attributes are renamed on an instance of `AwsBatchOperator`; these were mostly +used like private attributes but they were surfaced in the public API, so any use of them needs +to be updated as follows: +- `AwsBatchOperator().jobId` -> `AwsBatchOperator().job_id` +- `AwsBatchOperator().jobName` -> `AwsBatchOperator().job_name` -### Moved provide_gcp_credential_file decorator to GoogleBaseHook +The `AwsBatchOperator` gets a new option to define a custom model for waiting on job status changes. 
+The `AwsBatchOperator` can use a new `waiters` parameter, an instance of `AwsBatchWaiters`, to +specify that custom job waiters will be used to monitor a batch job. See the latest API +documentation for details. -To simplify the code, the decorator has been moved from the inner-class. +#### `airflow.providers.amazon.aws.sensors.athena.AthenaSensor` -Instead of `@GoogleBaseHook._Decorators.provide_gcp_credential_file`, -you should write `@GoogleBaseHook.provide_gcp_credential_file` +Replace parameter `max_retires` with `max_retries` to fix typo. -### Changes to S3Hook +#### `airflow.providers.amazon.aws.hooks.s3.S3Hook` Note: The order of arguments has changed for `check_for_prefix`. The `bucket_name` is now optional. It falls back to the `connection schema` attribute. The `delete_objects` now returns `None` instead of a response, since the method now makes multiple api requests when the keys list length is > 1000. -### Changes to Google Transfer Operator -To obtain pylint compatibility the `filter ` argument in `GcpTransferServiceOperationsListOperator` -has been renamed to `request_filter`. +### Changes in other provider packages -### Changes in Google Cloud Transfer Hook - To obtain pylint compatibility the `filter` argument in `GCPTransferServiceHook.list_transfer_job` and - `GCPTransferServiceHook.list_transfer_operations` has been renamed to `request_filter`. +We strive to ensure that there are no changes that may affect the end user and your Python files, but this +release may contain changes that will require changes to your configuration, DAG Files or other integration +e.g. custom operators. -### CLI reorganization +Only changes unique to providers are described here. You should still pay attention to the changes that +have been made to the core (including core operators) as they can affect the integration behavior +of this provider. -The Airflow CLI has been organized so that related commands are grouped -together as subcommands. 
The `airflow list_dags` command is now `airflow -dags list`, `airflow pause` is `airflow dags pause`, etc. For a complete -list of updated CLI commands, see https://airflow.apache.org/cli.html. +This section describes the changes that have been made, and what you need to do to update your if +you use any code located in `airflow.providers` package. -### Removal of Mesos Executor +#### Removed Hipchat integration -The Mesos Executor is removed from the code base as it was not widely used and not maintained. [Mailing List Discussion on deleting it](https://lists.apache.org/thread.html/daa9500026b820c6aaadeffd66166eae558282778091ebbc68819fb7@%3Cdev.airflow.apache.org%3E). +Hipchat has reached end of life and is no longer available. -### Increase standard Dataproc disk sizes +For more information please see +https://community.atlassian.com/t5/Stride-articles/Stride-and-Hipchat-Cloud-have-reached-End-of-Life-updated/ba-p/940248 -It is highly recommended to have 1TB+ disk size for Dataproc to have sufficient throughput: -https://cloud.google.com/compute/docs/disks/performance +#### `airflow.providers.salesforce.hooks.salesforce.SalesforceHook` -Hence, the default value for `master_disk_size` in DataprocCreateClusterOperator has beeen changes from 500GB to 1TB. +Replace parameter ``sandbox`` with ``domain``. According to change in simple-salesforce package. -### Changes to SalesforceHook +Rename `sign_in` function to `get_conn`. -* renamed `sign_in` function to `get_conn` +#### `airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook.create_segment` -### HTTPHook verify default value changed from False to True. +Rename parameter name from ``format`` to ``segment_format`` in PinotAdminHook function create_segment fro pylint compatible -The HTTPHook is now secured by default: `verify=True`. -This can be overwriten by using the extra_options param as `{'verify': False}`. 
+#### `airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_partitions` -### Changes to GoogleCloudStorageHook +Rename parameter name from ``filter`` to ``partition_filter`` in HiveMetastoreHook function get_partitions for pylint compatibility -* The following parameters have been replaced in all the methods in GCSHook: - * `bucket` is changed to `bucket_name` - * `object` is changed to `object_name` +#### `airflow.providers.ftp.hooks.ftp.FTPHook.list_directory` -* The `maxResults` parameter in `GoogleCloudStorageHook.list` has been renamed to `max_results` for consistency. +Remove unnecessary parameter ``nlst`` in FTPHook function ``list_directory`` for pylint compatibility + +#### `airflow.providers.postgres.hooks.postgres.PostgresHook.copy_expert` + +Remove unnecessary parameter ``open`` in PostgresHook function ``copy_expert`` for pylint compatibility + +#### `airflow.providers.opsgenie.operators.opsgenie_alert.OpsgenieAlertOperator` + +Change parameter name from ``visibleTo`` to ``visible_to`` in OpsgenieAlertOperator for pylint compatibility + +#### `airflow.providers.imap.hooks.imap.ImapHook` +#### `airflow.providers.imap.sensors.imap_attachment.ImapAttachmentSensor` + +ImapHook: +* The order of arguments has changed for `has_mail_attachment`, +`retrieve_mail_attachments` and `download_mail_attachments`. +* A new `mail_filter` argument has been added to each of those. + + +#### `airflow.providers.http.hooks.http.HttpHook` + +The HTTPHook is now secured by default: `verify=True` (before: `verify=False`) +This can be overwritten by using the extra_options param as `{'verify': False}`. -### Changes to CloudantHook +#### `airflow.providers.cloudant.hooks.cloudant.CloudantHook` * upgraded cloudant version from `>=0.5.9,<2.0` to `>=2.0` * removed the use of the `schema` attribute in the connection @@ -1261,7 +1595,7 @@ This can be overwriten by using the extra_options param as `{'verify': False}`. 
For example: ```python -from airflow.contrib.hooks.cloudant_hook import CloudantHook +from airflow.providers.cloudant.hooks.cloudant import CloudantHook with CloudantHook().get_conn() as cloudant_session: database = cloudant_session['database_name'] @@ -1269,36 +1603,45 @@ with CloudantHook().get_conn() as cloudant_session: See the [docs](https://python-cloudant.readthedocs.io/en/latest/) for more information on how to use the new cloudant version. -### Unify default conn_id for Google Cloud Platform +#### `airflow.providers.snowflake` -Previously not all hooks and operators related to Google Cloud Platform use -``google_cloud_default`` as a default conn_id. There is currently one default -variant. Values like ``google_cloud_storage_default``, ``bigquery_default``, -``google_cloud_datastore_default`` have been deprecated. The configuration of -existing relevant connections in the database have been preserved. To use those -deprecated GCP conn_id, you need to explicitly pass their conn_id into -operators/hooks. Otherwise, ``google_cloud_default`` will be used as GCP's conn_id -by default. +When initializing a Snowflake hook or operator, the value used for `snowflake_conn_id` was always `snowflake_conn_id`, regardless of whether or not you specified a value for it. The default `snowflake_conn_id` value is now switched to `snowflake_default` for consistency and will be properly overridden when specified. -### Removed deprecated import mechanism +### Other changes -The deprecated import mechanism has been removed so the import of modules becomes more consistent and explicit. +This release also includes changes that fall outside any of the sections above. -For example: `from airflow.operators import BashOperator` -becomes `from airflow.operators.bash_operator import BashOperator` +#### Standardised "extra" requirements -### Changes to sensor imports +We standardised the Extras names and synchronized providers package names with the main airflow extras. 
-Sensors are now accessible via `airflow.sensors` and no longer via `airflow.operators.sensors`. +We deprecated a number of extras in 2.0. -For example: `from airflow.operators.sensors import BaseSensorOperator` -becomes `from airflow.sensors.base_sensor_operator import BaseSensorOperator` +| Deprecated extras | New extras | +|---------------------------|------------------| +| atlas | apache.atlas | +| aws | amazon | +| azure | microsoft.azure | +| azure_blob_storage | microsoft.azure | +| azure_data_lake | microsoft.azure | +| azure_cosmos | microsoft.azure | +| azure_container_instances | microsoft.azure | +| cassandra | apache.cassandra | +| druid | apache.druid | +| gcp | google | +| gcp_api | google | +| hdfs | apache.hdfs | +| hive | apache.hive | +| kubernetes | cncf.kubernetes | +| mssql | microsoft.mssql | +| pinot | apache.pinot | +| webhdfs | apache.webhdfs | +| winrm | apache.winrm | -### Renamed "extra" requirements for cloud providers +For example: -Subpackages for specific services have been combined into one variant for -each cloud provider. The name of the subpackage for the Google Cloud Platform -has changed to follow style. +If you want to install integration for Apache Atlas, then instead of `pip install apache-airflow[atlas]` +you should use `pip install apache-airflow[apache.atlas]`. If you want to install integration for Microsoft Azure, then instead of ``` @@ -1309,92 +1652,9 @@ you should execute `pip install 'apache-airflow[azure]'` If you want to install integration for Amazon Web Services, then instead of `pip install 'apache-airflow[s3,emr]'`, you should execute `pip install 'apache-airflow[aws]'` -If you want to install integration for Google Cloud Platform, then instead of -`pip install 'apache-airflow[gcp_api]'`, you should execute `pip install 'apache-airflow[gcp]'`. -The old way will work until the release of Airflow 2.1. 
- -### Deprecate legacy UI in favor of FAB RBAC UI - -Previously we were using two versions of UI, which were hard to maintain as we need to implement/update the same feature -in both versions. With this change we've removed the older UI in favor of Flask App Builder RBAC UI. No need to set the -RBAC UI explicitly in the configuration now as this is the only default UI. -Please note that that custom auth backends will need re-writing to target new FAB based UI. - -As part of this change, a few configuration items in `[webserver]` section are removed and no longer applicable, -including `authenticate`, `filter_by_owner`, `owner_mode`, and `rbac`. - -#### Remove run_duration - -We should not use the `run_duration` option anymore. This used to be for restarting the scheduler from time to time, but right now the scheduler is getting more stable and therefore using this setting is considered bad and might cause an inconsistent state. - -### CLI Changes - -The ability to manipulate users from the command line has been changed. 'airflow create_user' and 'airflow delete_user' and 'airflow list_users' has been grouped to a single command `airflow users` with optional flags `--create`, `--list` and `--delete`. - -Example Usage: - -To create a new user: -```bash -airflow users --create --username jondoe --lastname doe --firstname jon --email jdoe@apache.org --role Viewer --password test -``` - -To list users: -```bash -airflow users --list -``` - -To delete a user: -```bash -airflow users --delete --username jondoe -``` - -To add a user to a role: -```bash -airflow users --add-role --username jondoe --role Public -``` - -To remove a user from a role: -```bash -airflow users --remove-role --username jondoe --role Public -``` - -### Unification of `do_xcom_push` flag -The `do_xcom_push` flag (a switch to push the result of an operator to xcom or not) was appearing in different incarnations in different operators. 
It's function has been unified under a common name (`do_xcom_push`) on `BaseOperator`. This way it is also easy to globally disable pushing results to xcom. - -The following operators were affected: - -* DatastoreExportOperator (Backwards compatible) -* DatastoreImportOperator (Backwards compatible) -* KubernetesPodOperator (Not backwards compatible) -* SSHOperator (Not backwards compatible) -* WinRMOperator (Not backwards compatible) -* BashOperator (Not backwards compatible) -* DockerOperator (Not backwards compatible) -* SimpleHttpOperator (Not backwards compatible) - -See [AIRFLOW-3249](https://jira.apache.org/jira/browse/AIRFLOW-3249) for details - -### Changes to Dataproc related Operators -The 'properties' and 'jars' properties for the Dataproc related operators (`DataprocXXXOperator`) have been renamed from -`dataproc_xxxx_properties` and `dataproc_xxx_jars` to `dataproc_properties` -and `dataproc_jars`respectively. -Arguments for dataproc_properties dataproc_jars - -### Changes to skipping behaviour of LatestOnlyOperator - -In previous versions, the `LatestOnlyOperator` forcefully skipped all (direct and undirect) downstream tasks on its own. From this version on the operator will **only skip direct downstream** tasks and the scheduler will handle skipping any further downstream dependencies. - -No change is needed if only the default trigger rule `all_success` is being used. - -If the DAG relies on tasks with other trigger rules (i.e. `all_done`) being skipped by the `LatestOnlyOperator`, adjustments to the DAG need to be made to commodate the change in behaviour, i.e. with additional edges from the `LatestOnlyOperator`. - -The goal of this change is to achieve a more consistent and configurale cascading behaviour based on the `BaseBranchOperator` (see [AIRFLOW-2923](https://jira.apache.org/jira/browse/AIRFLOW-2923) and [AIRFLOW-1784](https://jira.apache.org/jira/browse/AIRFLOW-1784)). 
- -### Change default snowflake_conn_id for Snowflake hook and operators - -When initializing a Snowflake hook or operator, the value used for `snowflake_conn_id` was always `snowflake_conn_id`, regardless of whether or not you specified a value for it. The default `snowflake_conn_id` value is now switched to `snowflake_default` for consistency and will be properly overriden when specified. +The deprecated extras will be removed in 2.1: -### Simplify the response payload of endpoints /dag_stats and /task_stats +#### Simplify the response payload of endpoints /dag_stats and /task_stats The response of endpoints `/dag_stats` and `/task_stats` help UI fetch brief statistics about DAGs and Tasks. The format was like @@ -1442,47 +1702,6 @@ Now the `dag_id` will not appear repeated in the payload, and the response forma } ``` -### Change in DagBag signature - -Passing `store_serialized_dags` argument to DagBag.__init__ and accessing `DagBag.store_serialized_dags` property -are deprecated and will be removed in future versions. - - -**Previous signature**: - -```python -DagBag( - dag_folder=None, - include_examples=conf.getboolean('core', 'LOAD_EXAMPLES'), - safe_mode=conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'), - store_serialized_dags=False -): -``` - -**current**: -```python -DagBag( - dag_folder=None, - include_examples=conf.getboolean('core', 'LOAD_EXAMPLES'), - safe_mode=conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'), - read_dags_from_db=False -): -``` - -If you were using positional arguments, it requires no change but if you were using keyword -arguments, please change `store_serialized_dags` to `read_dags_from_db`. - -Similarly, if you were using `DagBag().store_serialized_dags` property, change it to -`DagBag().read_dags_from_db`. - -### TimeSensor is now timezone aware - -Previously `TimeSensor` always compared the `target_time` with the current time in UTC. 
- -Now it will compare `target_time` with the current time in the timezone of the DAG, -defaulting to the `default_timezone` in the global config. - - ## Airflow 1.10.11 ### Use NULL as default value for dag.description @@ -1650,12 +1869,6 @@ This is the correct behavior for use with BigQuery, since BigQuery assumes that TIMESTAMP columns without time zones are in UTC. To preserve the previous behavior, set `ensure_utc` to `False.` -### Python 2 support is going away - -Airflow 1.10 will be the last release series to support Python 2. Airflow 2.0.0 will only support Python 3.5 and up. - -If you have a specific task that still requires Python 2 then you can use the PythonVirtualenvOperator for this. - ### Changes to DatastoreHook * removed argument `version` from `get_conn` function and added it to the hook's `__init__` function instead and renamed it to `api_version` diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml index 96467faeab357..5f7e33905253f 100644 --- a/airflow/api_connexion/openapi/v1.yaml +++ b/airflow/api_connexion/openapi/v1.yaml @@ -289,7 +289,8 @@ paths: post: summary: Trigger a DAG Run - operationId: airflow.api_connexion.endpoints.dag_run_endpoint.post_dag_run + x-openapi-router-controller: airflow.api_connexion.endpoints.dag_run_endpoint + operationId: post_dag_run tags: [DAGRun] requestBody: required: true @@ -1882,7 +1883,7 @@ components: description: > Returns objects greater or equal the specified date. - This can be combined with startd_ate_lte parameter to receive only the selected period. + This can be combined with start_date_lte parameter to receive only the selected period. start_date_lte: type: string format: date-time @@ -2269,7 +2270,7 @@ components: description: > Returns objects greater or equal the specified date. - This can be combined with startd_ate_lte parameter to receive only the selected period. + This can be combined with start_date_lte parameter to receive only the selected period. 
FilterStartDateLTE: in: query name: start_date_lte diff --git a/airflow/cli/cli_parser.py b/airflow/cli/cli_parser.py index ae1bf8306cac5..4ddc27b0c573c 100644 --- a/airflow/cli/cli_parser.py +++ b/airflow/cli/cli_parser.py @@ -28,6 +28,7 @@ from tabulate import tabulate_formats from airflow import settings +from airflow.cli.commands.legacy_commands import check_legacy_command from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.executors.executor_loader import ExecutorLoader @@ -65,6 +66,8 @@ def _check_value(self, action, value): if value == 'celery' and executor != ExecutorLoader.CELERY_EXECUTOR: message = f'celery subcommand works only with CeleryExecutor, your current executor: {executor}' raise ArgumentError(action, message) + if action.choices is not None and value not in action.choices: + check_legacy_command(action, value) super()._check_value(action, value) @@ -626,6 +629,15 @@ def positive_int(value): ), action="store_true", default=False) +ARG_CONN_EXPORT = Arg( + ('file',), + help='Output file path for exporting the connections', + type=argparse.FileType('w', encoding='UTF-8')) +ARG_CONN_EXPORT_FORMAT = Arg( + ('--format',), + help='Format of the connections data in file', + type=str, + choices=['json', 'yaml', 'env']) # users ARG_USERNAME = Arg( ('-u', '--username'), @@ -730,6 +742,17 @@ def positive_int(value): action='store_true' ) +# config +ARG_SECTION = Arg( + ("section",), + help="The section name", +) +ARG_OPTION = Arg( + ("option",), + help="The option name", +) + + ALTERNATIVE_CONN_SPECS_ARGS = [ ARG_CONN_TYPE, ARG_CONN_HOST, ARG_CONN_LOGIN, ARG_CONN_PASSWORD, ARG_CONN_SCHEMA, ARG_CONN_PORT ] @@ -1093,6 +1116,22 @@ class GroupCommand(NamedTuple): func=lazy_load_command('airflow.cli.commands.connection_command.connections_delete'), args=(ARG_CONN_ID,), ), + ActionCommand( + name='export', + help='Export all connections', + description=("All connections can be exported in STDOUT using the following 
command:\n" + "airflow connections export -\n" + "The file format can be determined by the provided file extension. eg, The following " + "command will export the connections in JSON format:\n" + "airflow connections export /tmp/connections.json\n" + "The --format parameter can be used to mention the connections format. eg, " + "the default format is JSON in STDOUT mode, which can be overridden using: \n" + "airflow connections export - --format yaml\n" + "The --format parameter can also be used for the files, for example:\n" + "airflow connections export /tmp/connections --format json\n"), + func=lazy_load_command('airflow.cli.commands.connection_command.connections_export'), + args=(ARG_CONN_EXPORT, ARG_CONN_EXPORT_FORMAT,), + ), ) USERS_COMMANDS = ( ActionCommand( @@ -1184,6 +1223,21 @@ class GroupCommand(NamedTuple): ) ) +CONFIG_COMMANDS = ( + ActionCommand( + name='get-value', + help='Print the value of the configuration', + func=lazy_load_command('airflow.cli.commands.config_command.get_value'), + args=(ARG_SECTION, ARG_OPTION, ), + ), + ActionCommand( + name='list', + help='List options for the configuration.', + func=lazy_load_command('airflow.cli.commands.config_command.show_config'), + args=(ARG_COLOR, ), + ), +) + airflow_commands: List[CLICommand] = [ GroupCommand( name='dags', @@ -1273,11 +1327,10 @@ class GroupCommand(NamedTuple): ), args=(), ), - ActionCommand( - name='config', - help='Show current application configuration', - func=lazy_load_command('airflow.cli.commands.config_command.show_config'), - args=(ARG_COLOR, ), + GroupCommand( + name="config", + help='View the configuration options.', + subcommands=CONFIG_COMMANDS ), ActionCommand( name='info', @@ -1323,13 +1376,13 @@ def _format_action(self, action: Action): self._indent() subactions = action._get_subactions() # pylint: disable=protected-access - action_subcommnads, group_subcommnands = partition( + action_subcommands, group_subcommands = partition( lambda d: 
isinstance(ALL_COMMANDS_DICT[d.dest], GroupCommand), subactions ) parts.append("\n") parts.append('%*s%s:\n' % (self._current_indent, '', "Groups")) self._indent() - for subaction in group_subcommnands: + for subaction in group_subcommands: parts.append(self._format_action(subaction)) self._dedent() @@ -1337,7 +1390,7 @@ def _format_action(self, action: Action): parts.append('%*s%s:\n' % (self._current_indent, '', "Commands")) self._indent() - for subaction in action_subcommnads: + for subaction in action_subcommands: parts.append(self._format_action(subaction)) self._dedent() self._dedent() diff --git a/airflow/cli/commands/config_command.py b/airflow/cli/commands/config_command.py index 0a1e00c9c5d31..22bc6319c475f 100644 --- a/airflow/cli/commands/config_command.py +++ b/airflow/cli/commands/config_command.py @@ -16,6 +16,7 @@ # under the License. """Config sub-commands""" import io +import sys import pygments from pygments.formatters.terminal import TerminalFormatter @@ -35,3 +36,17 @@ def show_config(args): code=code, formatter=TerminalFormatter(), lexer=IniLexer() ) print(code) + + +def get_value(args): + """Get one value from configuration""" + if not conf.has_section(args.section): + print(f'The section [{args.section}] is not found in config.', file=sys.stderr) + sys.exit(1) + + if not conf.has_option(args.section, args.option): + print(f'The option [{args.section}/{args.option}] is not found in config.', file=sys.stderr) + sys.exit(1) + + value = conf.get(args.section, args.option) + print(value) diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py index cc23849436ca3..39dec970845ad 100644 --- a/airflow/cli/commands/connection_command.py +++ b/airflow/cli/commands/connection_command.py @@ -15,10 +15,14 @@ # specific language governing permissions and limitations # under the License. 
"""Connection sub-commands""" +import io +import json +import os import sys from typing import List from urllib.parse import urlunparse +import yaml from sqlalchemy.orm import exc from tabulate import tabulate @@ -67,6 +71,69 @@ def connections_list(args): print(msg) +def _format_connections(conns: List[Connection], fmt: str) -> str: + if fmt == '.env': + connections_env = "" + for conn in conns: + connections_env += f"{conn.conn_id}={conn.get_uri()}\n" + return connections_env + + connections_dict = {} + for conn in conns: + connections_dict[conn.conn_id] = { + 'conn_type': conn.conn_type, + 'host': conn.host, + 'login': conn.login, + 'password': conn.password, + 'schema': conn.schema, + 'port': conn.port, + 'extra': conn.extra, + } + + if fmt == '.yaml': + return yaml.dump(connections_dict) + + if fmt == '.json': + return json.dumps(connections_dict) + + return json.dumps(connections_dict) + + +def _is_stdout(fileio: io.TextIOWrapper) -> bool: + if fileio.name == '': + return True + return False + + +def connections_export(args): + """Exports all connections to a file""" + allowed_formats = ['.yaml', '.json', '.env'] + provided_format = None if args.format is None else f".{args.format.lower()}" + default_format = provided_format or '.json' + + with create_session() as session: + if _is_stdout(args.file): + filetype = default_format + elif provided_format is not None: + filetype = provided_format + else: + _, filetype = os.path.splitext(args.file.name) + filetype = filetype.lower() + if filetype not in allowed_formats: + msg = f"Unsupported file format. 
" \ + f"The file must have the extension {', '.join(allowed_formats)}" + raise SystemExit(msg) + + connections = session.query(Connection).all() + msg = _format_connections(connections, filetype) + args.file.write(msg) + + if _is_stdout(args.file): + print("Connections successfully exported.", file=sys.stderr) + else: + print(f"Connections successfully exported to {args.file.name}") + + alternative_conn_specs = ['conn_type', 'conn_host', 'conn_login', 'conn_password', 'conn_schema', 'conn_port'] diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py index 55c40b4d812bd..e003f16ba1427 100644 --- a/airflow/cli/commands/dag_command.py +++ b/airflow/cli/commands/dag_command.py @@ -43,7 +43,7 @@ def _tabulate_dag_runs(dag_runs: List[DagRun], tablefmt: str = "fancy_grid") -> str: - tabulat_data = ( + tabulate_data = ( { 'ID': dag_run.id, 'Run ID': dag_run.run_id, @@ -55,13 +55,13 @@ def _tabulate_dag_runs(dag_runs: List[DagRun], tablefmt: str = "fancy_grid") -> } for dag_run in dag_runs ) return tabulate( - tabular_data=tabulat_data, + tabular_data=tabulate_data, tablefmt=tablefmt ) def _tabulate_dags(dags: List[DAG], tablefmt: str = "fancy_grid") -> str: - tabulat_data = ( + tabulate_data = ( { 'DAG ID': dag.dag_id, 'Filepath': dag.filepath, @@ -69,7 +69,7 @@ def _tabulate_dags(dags: List[DAG], tablefmt: str = "fancy_grid") -> str: } for dag in sorted(dags, key=lambda d: d.dag_id) ) return tabulate( - tabular_data=tabulat_data, + tabular_data=tabulate_data, tablefmt=tablefmt, headers='keys' ) @@ -261,10 +261,10 @@ def dag_state(args): dag = get_dag_by_file_location(args.dag_id) dr = DagRun.find(dag.dag_id, execution_date=args.execution_date) out = dr[0].state if dr else None - confout = '' + conf_out = '' if out and dr[0].conf: - confout = ', ' + json.dumps(dr[0].conf) - print(str(out) + confout) + conf_out = ', ' + json.dumps(dr[0].conf) + print(str(out) + conf_out) @cli_utils.action_logging diff --git 
a/airflow/cli/commands/info_command.py b/airflow/cli/commands/info_command.py index 45c93219ea0d7..487e05a6968c8 100644 --- a/airflow/cli/commands/info_command.py +++ b/airflow/cli/commands/info_command.py @@ -43,7 +43,7 @@ def process_path(self, value): """Remove pii from paths""" def process_username(self, value): - """Remove pii from ussername""" + """Remove pii from username""" def process_url(self, value): """Remove pii from URL""" @@ -247,7 +247,7 @@ def __str__(self): class PathsInfo: - """Path informaation""" + """Path information""" def __init__(self, anonymizer: Anonymizer): system_path = os.environ.get("PATH", "").split(os.pathsep) @@ -391,7 +391,7 @@ class FileIoException(Exception): after=tenacity.after_log(log, logging.DEBUG), ) def _upload_text_to_fileio(content): - """Uload text file to File.io service and return lnk""" + """Upload text file to File.io service and return link""" resp = requests.post("https://file.io", files={"file": ("airflow-report.txt", content)}) if not resp.ok: raise FileIoException("Failed to send report to file.io service.") diff --git a/airflow/cli/commands/legacy_commands.py b/airflow/cli/commands/legacy_commands.py new file mode 100644 index 0000000000000..b2ca64ae08c27 --- /dev/null +++ b/airflow/cli/commands/legacy_commands.py @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +from argparse import ArgumentError + +COMMAND_MAP = { + "worker": "celery worker", + "flower": "celery flower", + "trigger_dag": "dags trigger", + "delete_dag": "dags delete", + "show_dag": "dags show", + "list_dag": "dags list", + "dag_status": "dags status", + "backfill": "dags backfill", + "list_dag_runs": "dags list_runs", + "pause": "dags pause", + "unpause": "dags unpause", + "test": "tasks test", + "clear": "tasks clear", + "list_tasks": "tasks list", + "task_failed_deps": "tasks failed_deps", + "task_state": "tasks state", + "run": "tasks run", + "render": "tasks render", + "initdb": "db init", + "resetdb": "db reset", + "upgradedb": "db upgrade", + "checkdb": "db check", + "shell": "db shell", + "pool": "pools", + "list_users": "users list", + "create_user": "users create", + "delete_user": "users delete" +} + + +def check_legacy_command(action, value): + """ Checks command value and raise error if value is in removed command """ + new_command = COMMAND_MAP.get(value) + if new_command is not None: + msg = f"`airflow {value}` command, has been removed, please use `airflow {new_command}`" + raise ArgumentError(action, msg) diff --git a/airflow/cli/commands/webserver_command.py b/airflow/cli/commands/webserver_command.py index 1039ea338f37f..a8d4cd86f415b 100644 --- a/airflow/cli/commands/webserver_command.py +++ b/airflow/cli/commands/webserver_command.py @@ -75,7 +75,7 @@ class GunicornMonitor(LoggingMixin): :param worker_refresh_batch_size: Number of workers to refresh at a time. When set to 0, worker refresh is disabled. When nonzero, airflow periodically refreshes webserver workers by bringing up new ones and killing old ones. - :param reload_on_plugin_change: If set to True, Airflow will track files in plugins_follder directory. + :param reload_on_plugin_change: If set to True, Airflow will track files in plugins_folder directory. 
When it detects changes, then reload the gunicorn. """ def __init__( diff --git a/airflow/config_templates/airflow_local_settings.py b/airflow/config_templates/airflow_local_settings.py index 3d3a15d72e940..afe4aea1df6d3 100644 --- a/airflow/config_templates/airflow_local_settings.py +++ b/airflow/config_templates/airflow_local_settings.py @@ -18,12 +18,12 @@ """Airflow logging settings""" import os +from pathlib import Path from typing import Any, Dict, Union from urllib.parse import urlparse from airflow.configuration import conf from airflow.exceptions import AirflowException -from airflow.utils.file import mkdirs # TODO: Logging format and level should be configured # in this file instead of from airflow.cfg. Currently @@ -151,7 +151,7 @@ processor_manager_handler_config: Dict[str, Any] = \ DEFAULT_DAG_PARSING_LOGGING_CONFIG['handlers']['processor_manager'] directory: str = os.path.dirname(processor_manager_handler_config['filename']) - mkdirs(directory, 0o755) + Path(directory).mkdir(parents=True, exist_ok=True, mode=0o755) ################## # Remote logging # diff --git a/airflow/example_dags/example_bash_operator.py b/airflow/example_dags/example_bash_operator.py index 56f03c5960b23..1c91cd0957e7c 100644 --- a/airflow/example_dags/example_bash_operator.py +++ b/airflow/example_dags/example_bash_operator.py @@ -27,13 +27,13 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(2), } dag = DAG( dag_id='example_bash_operator', default_args=args, schedule_interval='0 0 * * *', + start_date=days_ago(2), dagrun_timeout=timedelta(minutes=60), tags=['example'] ) diff --git a/airflow/example_dags/example_branch_operator.py b/airflow/example_dags/example_branch_operator.py index 0b37738360d35..66b9fff573e5b 100644 --- a/airflow/example_dags/example_branch_operator.py +++ b/airflow/example_dags/example_branch_operator.py @@ -27,12 +27,12 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(2), } dag = DAG( dag_id='example_branch_operator', default_args=args, + 
start_date=days_ago(2), schedule_interval="@daily", tags=['example'] ) diff --git a/airflow/example_dags/example_branch_python_dop_operator_3.py b/airflow/example_dags/example_branch_python_dop_operator_3.py index d0c1eda08b4c4..0a45e6093ebe9 100644 --- a/airflow/example_dags/example_branch_python_dop_operator_3.py +++ b/airflow/example_dags/example_branch_python_dop_operator_3.py @@ -28,13 +28,13 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(2), 'depends_on_past': True, } dag = DAG( dag_id='example_branch_dop_operator_v3', schedule_interval='*/1 * * * *', + start_date=days_ago(2), default_args=args, tags=['example'] ) diff --git a/airflow/example_dags/example_complex.py b/airflow/example_dags/example_complex.py index 54e27b8f60b74..1d2e582f8e530 100644 --- a/airflow/example_dags/example_complex.py +++ b/airflow/example_dags/example_complex.py @@ -26,12 +26,10 @@ from airflow.operators.python import PythonOperator from airflow.utils.dates import days_ago -default_args = {"start_date": days_ago(1)} - with models.DAG( dag_id="example_complex", - default_args=default_args, schedule_interval=None, + start_date=days_ago(1), tags=['example'], ) as dag: diff --git a/airflow/example_dags/example_kubernetes_executor.py b/airflow/example_dags/example_kubernetes_executor.py index b888792790063..a55e5b1bf909f 100644 --- a/airflow/example_dags/example_kubernetes_executor.py +++ b/airflow/example_dags/example_kubernetes_executor.py @@ -27,13 +27,13 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(2) } with DAG( dag_id='example_kubernetes_executor', default_args=args, schedule_interval=None, + start_date=days_ago(2), tags=['example'], ) as dag: diff --git a/airflow/example_dags/example_kubernetes_executor_config.py b/airflow/example_dags/example_kubernetes_executor_config.py index aec0fdde3e7fa..5fef13546efb8 100644 --- a/airflow/example_dags/example_kubernetes_executor_config.py +++ b/airflow/example_dags/example_kubernetes_executor_config.py @@ -27,13 +27,13 
@@ default_args = { 'owner': 'airflow', - 'start_date': days_ago(2) } with DAG( dag_id='example_kubernetes_executor_config', default_args=default_args, schedule_interval=None, + start_date=days_ago(2), tags=['example'], ) as dag: diff --git a/airflow/example_dags/example_passing_params_via_test_command.py b/airflow/example_dags/example_passing_params_via_test_command.py index 5b56a9ae37ebc..bcd5318c65fe1 100644 --- a/airflow/example_dags/example_passing_params_via_test_command.py +++ b/airflow/example_dags/example_passing_params_via_test_command.py @@ -30,9 +30,9 @@ "example_passing_params_via_test_command", default_args={ "owner": "airflow", - "start_date": days_ago(1), }, schedule_interval='*/1 * * * *', + start_date=days_ago(1), dagrun_timeout=timedelta(minutes=4), tags=['example'] ) diff --git a/airflow/example_dags/example_python_operator.py b/airflow/example_dags/example_python_operator.py index 8602399c79776..5b6d7b54d0b74 100644 --- a/airflow/example_dags/example_python_operator.py +++ b/airflow/example_dags/example_python_operator.py @@ -26,13 +26,13 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(2), } dag = DAG( dag_id='example_python_operator', default_args=args, schedule_interval=None, + start_date=days_ago(2), tags=['example'] ) @@ -72,6 +72,7 @@ def my_sleeping_function(random_base): # [END howto_operator_python_kwargs] +# [START howto_operator_python_venv] def callable_virtualenv(): """ Example function that will be performed in a virtual environment. 
@@ -101,3 +102,4 @@ def callable_virtualenv(): system_site_packages=False, dag=dag, ) +# [END howto_operator_python_venv] diff --git a/airflow/example_dags/example_short_circuit_operator.py b/airflow/example_dags/example_short_circuit_operator.py index fd565a3c14cc7..f454048ca55dd 100644 --- a/airflow/example_dags/example_short_circuit_operator.py +++ b/airflow/example_dags/example_short_circuit_operator.py @@ -25,10 +25,14 @@ args = { 'owner': 'airflow', - 'start_date': dates.days_ago(2), } -dag = DAG(dag_id='example_short_circuit_operator', default_args=args, tags=['example']) +dag = DAG( + dag_id='example_short_circuit_operator', + default_args=args, + start_date=dates.days_ago(2), + tags=['example'], +) cond_true = ShortCircuitOperator( task_id='condition_is_True', diff --git a/airflow/example_dags/example_skip_dag.py b/airflow/example_dags/example_skip_dag.py index 2016fd84eac36..e5ffdd6e9b14d 100644 --- a/airflow/example_dags/example_skip_dag.py +++ b/airflow/example_dags/example_skip_dag.py @@ -25,7 +25,6 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(2), } @@ -57,6 +56,6 @@ def create_test_pipeline(suffix, trigger_rule, dag_): join >> final -dag = DAG(dag_id='example_skip_dag', default_args=args, tags=['example']) +dag = DAG(dag_id='example_skip_dag', default_args=args, start_date=days_ago(2), tags=['example']) create_test_pipeline('1', 'all_success', dag) create_test_pipeline('2', 'one_success', dag) diff --git a/airflow/example_dags/example_subdag_operator.py b/airflow/example_dags/example_subdag_operator.py index 6a3bc1c09601e..f21e3d4db1577 100644 --- a/airflow/example_dags/example_subdag_operator.py +++ b/airflow/example_dags/example_subdag_operator.py @@ -29,12 +29,12 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(2), } dag = DAG( dag_id=DAG_NAME, default_args=args, + start_date=days_ago(2), schedule_interval="@once", tags=['example'] ) diff --git a/airflow/example_dags/example_trigger_controller_dag.py 
b/airflow/example_dags/example_trigger_controller_dag.py index e48332b20d86e..f8fd5d6610b98 100644 --- a/airflow/example_dags/example_trigger_controller_dag.py +++ b/airflow/example_dags/example_trigger_controller_dag.py @@ -27,7 +27,8 @@ dag = DAG( dag_id="example_trigger_controller_dag", - default_args={"owner": "airflow", "start_date": days_ago(2)}, + default_args={"owner": "airflow"}, + start_date=days_ago(2), schedule_interval="@once", tags=['example'] ) diff --git a/airflow/example_dags/example_trigger_target_dag.py b/airflow/example_dags/example_trigger_target_dag.py index 83db9458cbcd8..3f4cfd0f3c856 100644 --- a/airflow/example_dags/example_trigger_target_dag.py +++ b/airflow/example_dags/example_trigger_target_dag.py @@ -29,7 +29,8 @@ dag = DAG( dag_id="example_trigger_target_dag", - default_args={"start_date": days_ago(2), "owner": "airflow"}, + default_args={"owner": "airflow"}, + start_date=days_ago(2), schedule_interval=None, tags=['example'] ) diff --git a/airflow/example_dags/example_xcom.py b/airflow/example_dags/example_xcom.py index 858dc4067a6e8..b3956822a3f64 100644 --- a/airflow/example_dags/example_xcom.py +++ b/airflow/example_dags/example_xcom.py @@ -21,12 +21,13 @@ from airflow.operators.python import PythonOperator from airflow.utils.dates import days_ago -args = { - 'owner': 'airflow', - 'start_date': days_ago(2), -} - -dag = DAG('example_xcom', schedule_interval="@once", default_args=args, tags=['example']) +dag = DAG( + 'example_xcom', + schedule_interval="@once", + start_date=days_ago(2), + default_args={'owner': 'airflow'}, + tags=['example'] +) value_1 = [1, 2, 3] value_2 = {'a': 'b'} diff --git a/airflow/example_dags/example_xcomargs.py b/airflow/example_dags/example_xcomargs.py index 019b1bd7baa99..42faadd0e5585 100644 --- a/airflow/example_dags/example_xcomargs.py +++ b/airflow/example_dags/example_xcomargs.py @@ -25,11 +25,6 @@ log = logging.getLogger(__name__) -args = { - 'owner': 'airflow', - 'start_date': days_ago(2), -} - 
def generate_value(): """Dummy function""" @@ -45,7 +40,8 @@ def print_value(value): with DAG( dag_id='example_xcom_args', - default_args=args, + default_args={'owner': 'airflow'}, + start_date=days_ago(2), schedule_interval=None, tags=['example'] ) as dag: diff --git a/airflow/example_dags/subdags/subdag.py b/airflow/example_dags/subdags/subdag.py index b510ca7295d33..44f6aa6bf98bc 100644 --- a/airflow/example_dags/subdags/subdag.py +++ b/airflow/example_dags/subdags/subdag.py @@ -21,6 +21,7 @@ # [START subdag] from airflow import DAG from airflow.operators.dummy_operator import DummyOperator +from airflow.utils.dates import days_ago def subdag(parent_dag_name, child_dag_name, args): @@ -36,6 +37,7 @@ def subdag(parent_dag_name, child_dag_name, args): dag_subdag = DAG( dag_id='%s.%s' % (parent_dag_name, child_dag_name), default_args=args, + start_date=days_ago(2), schedule_interval="@daily", ) diff --git a/airflow/example_dags/tutorial.py b/airflow/example_dags/tutorial.py index efdcf80e0a2bf..39f779c905b67 100644 --- a/airflow/example_dags/tutorial.py +++ b/airflow/example_dags/tutorial.py @@ -39,7 +39,6 @@ default_args = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': days_ago(2), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False, @@ -67,6 +66,7 @@ default_args=default_args, description='A simple tutorial DAG', schedule_interval=timedelta(days=1), + start_date=days_ago(2), tags=['example'], ) # [END instantiate_dag] diff --git a/airflow/exceptions.py b/airflow/exceptions.py index 9c2bde9599005..3038bb6ec4b4a 100644 --- a/airflow/exceptions.py +++ b/airflow/exceptions.py @@ -182,3 +182,7 @@ def __str__(self): result += "\n" + prepare_code_snippet(self.file_path, parse_error.line_no) + "\n" return result + + +class ConnectionNotUnique(AirflowException): + """Raise when multiple values are found for the same conn_id""" diff --git a/airflow/executors/base_executor.py b/airflow/executors/base_executor.py index 
ddcea54a1cdcc..80f96ca95f31b 100644 --- a/airflow/executors/base_executor.py +++ b/airflow/executors/base_executor.py @@ -264,3 +264,9 @@ def terminate(self): This method is called when the daemon receives a SIGTERM """ raise NotImplementedError() + + @staticmethod + def validate_command(command: List[str]) -> None: + """Check if the command to execute is airflow command""" + if command[0:3] != ["airflow", "tasks", "run"]: + raise ValueError('The command must start with ["airflow", "tasks", "run"].') diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py index 96771aebf350e..81e60722bb4b2 100644 --- a/airflow/executors/celery_executor.py +++ b/airflow/executors/celery_executor.py @@ -71,9 +71,7 @@ @app.task def execute_command(command_to_exec: CommandType) -> None: """Executes command.""" - if command_to_exec[0:3] != ["airflow", "tasks", "run"]: - raise ValueError('The command must start with ["airflow", "tasks", "run"].') - + BaseExecutor.validate_command(command_to_exec) log.info("Executing command in Celery: %s", command_to_exec) env = os.environ.copy() try: diff --git a/airflow/executors/dask_executor.py b/airflow/executors/dask_executor.py index 1ae9ef35f3905..76f32d40a14b3 100644 --- a/airflow/executors/dask_executor.py +++ b/airflow/executors/dask_executor.py @@ -72,8 +72,7 @@ def execute_async(self, queue: Optional[str] = None, executor_config: Optional[Any] = None) -> None: - if command[0:3] != ["airflow", "tasks", "run"]: - raise ValueError('The command must start with ["airflow", "tasks", "run"].') + self.validate_command(command) def airflow_run(): return subprocess.check_call(command, close_fds=True) diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py index 7bf8ddc6bfc3b..4800e66aca606 100644 --- a/airflow/executors/local_executor.py +++ b/airflow/executors/local_executor.py @@ -287,8 +287,7 @@ def execute_async(self, key: TaskInstanceKey, if not self.impl: raise 
AirflowException(NOT_STARTED_MESSAGE) - if command[0:3] != ["airflow", "tasks", "run"]: - raise ValueError('The command must start with ["airflow", "tasks", "run"].') + self.validate_command(command) self.impl.execute_async(key=key, command=command, queue=queue, executor_config=executor_config) diff --git a/airflow/executors/sequential_executor.py b/airflow/executors/sequential_executor.py index 906e117d1aa31..18a4747790c45 100644 --- a/airflow/executors/sequential_executor.py +++ b/airflow/executors/sequential_executor.py @@ -49,10 +49,7 @@ def execute_async(self, command: CommandType, queue: Optional[str] = None, executor_config: Optional[Any] = None) -> None: - - if command[0:3] != ["airflow", "tasks", "run"]: - raise ValueError('The command must start with ["airflow", "tasks", "run"].') - + self.validate_command(command) self.commands_to_run.append((key, command)) def sync(self) -> None: diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py index f7a2a537a3fd9..90b2e1cc337e6 100644 --- a/airflow/jobs/scheduler_job.py +++ b/airflow/jobs/scheduler_job.py @@ -982,7 +982,7 @@ def _prepare_simple_dags( return simple_dags -class SchedulerJob(BaseJob): +class SchedulerJob(BaseJob): # pylint: disable=too-many-instance-attributes """ This SchedulerJob runs for a specific time interval and schedules the jobs that are ready to run. It figures out the latest runs for each @@ -1544,7 +1544,7 @@ def _process_executor_events(self, simple_dag_bag: SimpleDagBag, session: Sessio if not tis_with_right_state: return - # Check state of finishes tasks + # Check state of finished tasks filter_for_tis = TI.filter_for_tis(tis_with_right_state) tis: List[TI] = session.query(TI).filter(filter_for_tis).all() for ti in tis: @@ -1555,17 +1555,14 @@ def _process_executor_events(self, simple_dag_bag: SimpleDagBag, session: Sessio # TODO: should we fail RUNNING as well, as we do in Backfills? 
if ti.try_number == buffer_key.try_number and ti.state == State.QUEUED: Stats.incr('scheduler.tasks.killed_externally') - self.log.error( - "Executor reports task instance %s finished (%s) although the task says its %s. " - "(Info: %s) Was the task killed externally?", - ti, state, ti.state, info - ) + msg = "Executor reports task instance %s finished (%s) although the " \ + "task says its %s. (Info: %s) Was the task killed externally?" + self.log.error(msg, ti, state, ti.state, info) simple_dag = simple_dag_bag.get_dag(ti.dag_id) self.processor_agent.send_callback_to_execute( full_filepath=simple_dag.full_filepath, task_instance=ti, - msg=f"Executor reports task instance finished ({state}) although the " - f"task says its {ti.state}. (Info: {info}) Was the task killed externally?" + msg=msg % (ti, state, ti.state, info), ) def _execute(self) -> None: diff --git a/airflow/migrations/versions/8d48763f6d53_add_unique_constraint_to_conn_id.py b/airflow/migrations/versions/8d48763f6d53_add_unique_constraint_to_conn_id.py new file mode 100644 index 0000000000000..34ce9e5aa5679 --- /dev/null +++ b/airflow/migrations/versions/8d48763f6d53_add_unique_constraint_to_conn_id.py @@ -0,0 +1,67 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +"""add unique constraint to conn_id + +Revision ID: 8d48763f6d53 +Revises: 8f966b9c467a +Create Date: 2020-05-03 16:55:01.834231 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = '8d48763f6d53' +down_revision = '8f966b9c467a' +branch_labels = None +depends_on = None + + +def upgrade(): + """Apply add unique constraint to conn_id and set it as non-nullable""" + try: + with op.batch_alter_table('connection') as batch_op: + batch_op.create_unique_constraint( + constraint_name="unique_conn_id", + columns=["conn_id"] + ) + + batch_op.alter_column( + "conn_id", + nullable=False, + existing_type=sa.String(250) + ) + except sa.exc.IntegrityError: + raise Exception("Make sure there are no duplicate connections with the same conn_id or null values") + + +def downgrade(): + """Unapply add unique constraint to conn_id and set it as non-nullable""" + with op.batch_alter_table('connection') as batch_op: + batch_op.drop_constraint( + constraint_name="unique_conn_id", + type_="unique" + ) + + batch_op.alter_column( + "conn_id", + nullable=True, + existing_type=sa.String(250) + ) diff --git a/airflow/models/connection.py b/airflow/models/connection.py index 75174c41784cc..37c3d1b1b779e 100644 --- a/airflow/models/connection.py +++ b/airflow/models/connection.py @@ -146,7 +146,7 @@ class Connection(Base, LoggingMixin): __tablename__ = "connection" id = Column(Integer(), primary_key=True) - conn_id = Column(String(ID_LEN)) + conn_id = Column(String(ID_LEN), unique=True, nullable=False) conn_type = Column(String(500), nullable=False) host = Column(String(500)) schema = Column(String(500)) diff --git a/airflow/models/dag.py b/airflow/models/dag.py index dfb6409c69003..222910adbb546 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -47,7 +47,8 @@ from airflow.models.dagcode import DagCode from 
airflow.models.dagpickle import DagPickle from airflow.models.dagrun import DagRun -from airflow.models.taskinstance import TaskInstance, clear_task_instances +from airflow.models.taskinstance import Context, TaskInstance, clear_task_instances +from airflow.stats import Stats from airflow.utils import timezone from airflow.utils.dates import cron_presets, date_range as utils_date_range from airflow.utils.file import correct_maybe_zipped @@ -64,6 +65,8 @@ DEFAULT_VIEW_PRESETS = ['tree', 'graph', 'duration', 'gantt', 'landing_times'] ORIENTATION_PRESETS = ['LR', 'TB', 'RL', 'BT'] +DagStateChangeCallback = Callable[[Context], None] + def get_last_dagrun(dag_id, session, include_externally_triggered=False): """ @@ -226,8 +229,8 @@ def __init__( default_view: str = conf.get('webserver', 'dag_default_view').lower(), orientation: str = conf.get('webserver', 'dag_orientation'), catchup: bool = conf.getboolean('scheduler', 'catchup_by_default'), - on_success_callback: Optional[Callable] = None, - on_failure_callback: Optional[Callable] = None, + on_success_callback: Optional[DagStateChangeCallback] = None, + on_failure_callback: Optional[DagStateChangeCallback] = None, doc_md: Optional[str] = None, params: Optional[Dict] = None, access_control: Optional[Dict] = None, @@ -680,13 +683,17 @@ def handle_callback(self, dagrun, success=True, reason=None, session=None): """ callback = self.on_success_callback if success else self.on_failure_callback if callback: - self.log.info('Executing dag callback function: {}'.format(callback)) + self.log.info('Executing dag callback function: %s', callback) tis = dagrun.get_task_instances() ti = tis[-1] # get first TaskInstance of DagRun ti.task = self.get_task(ti.task_id) context = ti.get_template_context(session=session) context.update({'reason': reason}) - callback(context) + try: + callback(context) + except Exception: + self.log.exception("failed to invoke dag state update callback") + Stats.incr("dag.callback_exceptions") def 
get_active_runs(self): """ diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index 1c7657f4b57d9..242b9dbbd7baf 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -21,6 +21,7 @@ from sqlalchemy import ( Boolean, Column, DateTime, Index, Integer, PickleType, String, UniqueConstraint, and_, func, or_, ) +from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.orm import synonym from sqlalchemy.orm.session import Session @@ -439,10 +440,10 @@ def verify_integrity(self, session=None): tis = self.get_task_instances(session=session) # check for removed or restored tasks - task_ids = [] + task_ids = set() for ti in tis: task_instance_mutation_hook(ti) - task_ids.append(ti.task_id) + task_ids.add(ti.task_id) task = None try: task = dag.get_task(ti.task_id) @@ -450,16 +451,16 @@ def verify_integrity(self, session=None): if ti.state == State.REMOVED: pass # ti has already been removed, just ignore it elif self.state is not State.RUNNING and not dag.partial: - self.log.warning("Failed to get task '{}' for dag '{}'. " - "Marking it as removed.".format(ti, dag)) + self.log.warning("Failed to get task '%s' for dag '%s'. 
" + "Marking it as removed.", ti, dag) Stats.incr( "task_removed_from_dag.{}".format(dag.dag_id), 1, 1) ti.state = State.REMOVED should_restore_task = (task is not None) and ti.state == State.REMOVED if should_restore_task: - self.log.info("Restoring task '{}' which was previously " - "removed from DAG '{}'".format(ti, dag)) + self.log.info("Restoring task '%s' which was previously " + "removed from DAG '%s'", ti, dag) Stats.incr("task_restored_to_dag.{}".format(dag.dag_id), 1, 1) ti.state = State.NONE session.merge(ti) @@ -477,7 +478,14 @@ def verify_integrity(self, session=None): task_instance_mutation_hook(ti) session.add(ti) - session.commit() + try: + session.commit() + except IntegrityError as err: + self.log.info(str(err)) + self.log.info('Hit IntegrityError while creating the TIs for ' + f'{dag.dag_id} - {self.execution_date}.') + self.log.info('Doing session rollback.') + session.rollback() @staticmethod def get_run(session, dag_id, execution_date): diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 632f4c567d59b..d07f91875de62 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -66,13 +66,14 @@ from airflow.utils.timeout import timeout TR = TaskReschedule +Context = Dict[str, Any] -_CURRENT_CONTEXT = [] +_CURRENT_CONTEXT: List[Context] = [] log = logging.getLogger(__name__) @contextlib.contextmanager -def set_current_context(context: Dict[str, Any]): +def set_current_context(context: Context): """ Sets the current execution context to the provided context object. This method should be called once per Task execution, before calling operator.execute. 
@@ -1166,8 +1167,8 @@ def signal_handler(signum, frame): # pylint: disable=unused-argument self.render_templates(context=context) if STORE_SERIALIZED_DAGS: - RTIF.write(RTIF(ti=self, render_templates=False), session=session) - RTIF.delete_old_records(self.task_id, self.dag_id, session=session) + RTIF.write(RTIF(ti=self, render_templates=False)) + RTIF.delete_old_records(self.task_id, self.dag_id) # Export context to make it available for operators to use. airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True) @@ -1391,7 +1392,7 @@ def _safe_date(self, date_attr, fmt): return '' @provide_session - def get_template_context(self, session=None) -> Dict[str, Any]: # pylint: disable=too-many-locals + def get_template_context(self, session=None) -> Context: # pylint: disable=too-many-locals """Return TI Context""" task = self.task from airflow import macros @@ -1583,7 +1584,7 @@ def overwrite_params_with_dag_run_conf(self, params, dag_run): self.log.debug("Updating task params (%s) with DagRun.conf (%s)", params, dag_run.conf) params.update(dag_run.conf) - def render_templates(self, context: Optional[Dict] = None) -> None: + def render_templates(self, context: Optional[Context] = None) -> None: """Render templates in the operator fields.""" if not context: context = self.get_template_context() diff --git a/airflow/providers/email/operators/email.py b/airflow/operators/email.py similarity index 95% rename from airflow/providers/email/operators/email.py rename to airflow/operators/email.py index 8bd8956648011..80d11310d1a13 100644 --- a/airflow/providers/email/operators/email.py +++ b/airflow/operators/email.py @@ -51,8 +51,8 @@ class EmailOperator(BaseOperator): ui_color = '#e6faf9' @apply_defaults - def __init__( - self, + def __init__( # pylint: disable=invalid-name + self, *, to: Union[List[str], str], subject: str, html_content: str, @@ -61,8 +61,8 @@ def __init__( bcc: Optional[Union[List[str], str]] = None, mime_subtype: str = 'mixed', 
mime_charset: str = 'utf-8', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.to = to # pylint: disable=invalid-name self.subject = subject self.html_content = html_content diff --git a/airflow/operators/email_operator.py b/airflow/operators/email_operator.py index 7a5459be9d023..135eb68f13f0b 100644 --- a/airflow/operators/email_operator.py +++ b/airflow/operators/email_operator.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.email.operators.email`.""" +"""This module is deprecated. Please use `airflow.operators.email`.""" import warnings # pylint: disable=unused-import -from airflow.providers.email.operators.email import EmailOperator # noqa +from airflow.operators.email import EmailOperator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.email.operators.email`.", + "This module is deprecated. 
Please use `airflow.operators.email`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/operators/python.py b/airflow/operators/python.py index 92cb2bb2f0f11..05f2f0b28870c 100644 --- a/airflow/operators/python.py +++ b/airflow/operators/python.py @@ -26,7 +26,7 @@ from itertools import islice from tempfile import TemporaryDirectory from textwrap import dedent -from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, TypeVar, cast +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, TypeVar, Union, cast import dill @@ -38,7 +38,7 @@ from airflow.models.xcom_arg import XComArg from airflow.utils.decorators import apply_defaults from airflow.utils.process_utils import execute_in_subprocess -from airflow.utils.python_virtualenv import prepare_virtualenv +from airflow.utils.python_virtualenv import prepare_virtualenv, write_python_script class PythonOperator(BaseOperator): @@ -363,6 +363,10 @@ class PythonVirtualenvOperator(PythonOperator): Note that if your virtualenv runs in a different Python major version than Airflow, you cannot use return values, op_args, or op_kwargs. You can use string_args though. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:PythonVirtualenvOperator` + :param python_callable: A python function with no references to outside variables, defined with def, which will be run in a virtualenv :type python_callable: function @@ -370,7 +374,7 @@ class PythonVirtualenvOperator(PythonOperator): :type requirements: list[str] :param python_version: The Python version to run the virtualenv with. Note that both 2 and 2.7 are acceptable forms. - :type python_version: str + :type python_version: Optional[Union[str, int, float]] :param use_dill: Whether to use dill to serialize the args and result (pickle is default). This allow more complex types but requires you to include dill in your requirements. 
@@ -397,13 +401,48 @@ class PythonVirtualenvOperator(PythonOperator): :type templates_exts: list[str] """ + BASE_SERIALIZABLE_CONTEXT_KEYS = { + 'ds_nodash', + 'inlets', + 'next_ds', + 'next_ds_nodash', + 'outlets', + 'params', + 'prev_ds', + 'prev_ds_nodash', + 'run_id', + 'task_instance_key_str', + 'test_mode', + 'tomorrow_ds', + 'tomorrow_ds_nodash', + 'ts', + 'ts_nodash', + 'ts_nodash_with_tz', + 'yesterday_ds', + 'yesterday_ds_nodash' + } + PENDULUM_SERIALIZABLE_CONTEXT_KEYS = { + 'execution_date', + 'next_execution_date', + 'prev_execution_date', + 'prev_execution_date_success', + 'prev_start_date_success' + } + AIRFLOW_SERIALIZABLE_CONTEXT_KEYS = { + 'macros', + 'conf', + 'dag', + 'dag_run', + 'task' + } + @apply_defaults def __init__( # pylint: disable=too-many-arguments self, *, python_callable: Callable, requirements: Optional[Iterable[str]] = None, - python_version: Optional[str] = None, + python_version: Optional[Union[str, int, float]] = None, use_dill: bool = False, system_site_packages: bool = True, op_args: Optional[List] = None, @@ -413,6 +452,17 @@ def __init__( # pylint: disable=too-many-arguments templates_exts: Optional[List[str]] = None, **kwargs ): + if ( + not isinstance(python_callable, types.FunctionType) or + isinstance(python_callable, types.LambdaType) and python_callable.__name__ == "<lambda>" + ): + raise AirflowException('PythonVirtualenvOperator only supports functions for python_callable arg') + if ( + python_version and str(python_version)[0] != str(sys.version_info.major) and + (op_args or op_kwargs) + ): + raise AirflowException("Passing op_args or op_kwargs is not supported across different Python " + "major versions for PythonVirtualenvOperator. 
Please use string_args.") super().__init__( python_callable=python_callable, op_args=op_args, @@ -420,144 +470,93 @@ def __init__( # pylint: disable=too-many-arguments templates_dict=templates_dict, templates_exts=templates_exts, **kwargs) - self.requirements = requirements or [] + self.requirements = list(requirements or []) self.string_args = string_args or [] self.python_version = python_version self.use_dill = use_dill self.system_site_packages = system_site_packages - # check that dill is present if needed - dill_in_requirements = map(lambda x: x.lower().startswith('dill'), - self.requirements) - if (not system_site_packages) and use_dill and not any(dill_in_requirements): - raise AirflowException('If using dill, dill must be in the environment ' + - 'either via system_site_packages or requirements') - # check that a function is passed, and that it is not a lambda - if (not isinstance(self.python_callable, - types.FunctionType) or (self.python_callable.__name__ == - (lambda x: 0).__name__)): - raise AirflowException('{} only supports functions for python_callable arg'.format( - self.__class__.__name__)) - # check that args are passed iff python major version matches - if (python_version is not None and - str(python_version)[0] != str(sys.version_info[0]) and - self._pass_op_args()): - raise AirflowException("Passing op_args or op_kwargs is not supported across " - "different Python major versions " - "for PythonVirtualenvOperator. 
" - "Please use string_args.") + if not self.system_site_packages and self.use_dill and 'dill' not in self.requirements: + self.requirements.append('dill') + self.pickling_library = dill if self.use_dill else pickle + + def execute(self, context: Dict): + serializable_context = {key: context[key] for key in self._get_serializable_context_keys()} + super().execute(context=serializable_context) def execute_callable(self): with TemporaryDirectory(prefix='venv') as tmp_dir: if self.templates_dict: self.op_kwargs['templates_dict'] = self.templates_dict - # generate filenames + input_filename = os.path.join(tmp_dir, 'script.in') output_filename = os.path.join(tmp_dir, 'script.out') string_args_filename = os.path.join(tmp_dir, 'string_args.txt') script_filename = os.path.join(tmp_dir, 'script.py') - # set up virtualenv - python_bin = 'python' + str(self.python_version) if self.python_version else None prepare_virtualenv( venv_directory=tmp_dir, - python_bin=python_bin, + python_bin=f'python{self.python_version}' if self.python_version else None, system_site_packages=self.system_site_packages, - requirements=self.requirements, + requirements=self.requirements ) self._write_args(input_filename) - self._write_script(script_filename) self._write_string_args(string_args_filename) + write_python_script( + jinja_context=dict( + op_args=self.op_args, + op_kwargs=self.op_kwargs, + pickling_library=self.pickling_library.__name__, + python_callable=self.python_callable.__name__, + python_callable_source=dedent(inspect.getsource(self.python_callable)) + ), + filename=script_filename + ) + + execute_in_subprocess(cmd=[ + f'{tmp_dir}/bin/python', + script_filename, + input_filename, + output_filename, + string_args_filename + ]) - # execute command in virtualenv - execute_in_subprocess( - self._generate_python_cmd(tmp_dir, - script_filename, - input_filename, - output_filename, - string_args_filename)) return self._read_result(output_filename) - def _pass_op_args(self): - # we should 
only pass op_args if any are given to us - return len(self.op_args) + len(self.op_kwargs) > 0 + def _write_args(self, filename): + if self.op_args or self.op_kwargs: + with open(filename, 'wb') as file: + self.pickling_library.dump({'args': self.op_args, 'kwargs': self.op_kwargs}, file) + + def _get_serializable_context_keys(self): + def _is_airflow_env(): + return self.system_site_packages or 'apache-airflow' in self.requirements + + def _is_pendulum_env(): + return 'pendulum' in self.requirements and 'lazy_object_proxy' in self.requirements + + serializable_context_keys = self.BASE_SERIALIZABLE_CONTEXT_KEYS.copy() + if _is_airflow_env(): + serializable_context_keys.update(self.AIRFLOW_SERIALIZABLE_CONTEXT_KEYS) + if _is_pendulum_env() or _is_airflow_env(): + serializable_context_keys.update(self.PENDULUM_SERIALIZABLE_CONTEXT_KEYS) + return serializable_context_keys def _write_string_args(self, filename): - # writes string_args to a file, which are read line by line with open(filename, 'w') as file: file.write('\n'.join(map(str, self.string_args))) - def _write_args(self, input_filename): - # serialize args to file - if self._pass_op_args(): - with open(input_filename, 'wb') as file: - arg_dict = ({'args': self.op_args, 'kwargs': self.op_kwargs}) - if self.use_dill: - dill.dump(arg_dict, file) - else: - pickle.dump(arg_dict, file) - - def _read_result(self, output_filename): - if os.stat(output_filename).st_size == 0: + def _read_result(self, filename): + if os.stat(filename).st_size == 0: return None - with open(output_filename, 'rb') as file: + with open(filename, 'rb') as file: try: - if self.use_dill: - return dill.load(file) - else: - return pickle.load(file) + return self.pickling_library.load(file) except ValueError: - self.log.error("Error deserializing result. " - "Note that result deserialization " + self.log.error("Error deserializing result. 
Note that result deserialization " "is not supported across major Python versions.") raise - def _write_script(self, script_filename): - with open(script_filename, 'w') as file: - python_code = self._generate_python_code() - self.log.debug('Writing code to file\n %s', python_code) - file.write(python_code) - - @staticmethod - def _generate_python_cmd(tmp_dir, script_filename, - input_filename, output_filename, string_args_filename): - # direct path alleviates need to activate - return ['{}/bin/python'.format(tmp_dir), script_filename, - input_filename, output_filename, string_args_filename] - - def _generate_python_code(self): - if self.use_dill: - pickling_library = 'dill' - else: - pickling_library = 'pickle' - - # dont try to read pickle if we didnt pass anything - if self._pass_op_args(): - load_args_line = 'with open(sys.argv[1], "rb") as file: arg_dict = {}.load(file)' \ - .format(pickling_library) - else: - load_args_line = 'arg_dict = {"args": [], "kwargs": {}}' - - # no indents in original code so we can accept - # any type of indents in the original function - # we deserialize args, call function, serialize result if necessary - return dedent("""\ - import {pickling_library} - import sys - {load_args_code} - args = arg_dict["args"] - kwargs = arg_dict["kwargs"] - with open(sys.argv[3], 'r') as file: - virtualenv_string_args = list(map(lambda x: x.strip(), list(file))) - {python_callable_lines} - res = {python_callable_name}(*args, **kwargs) - with open(sys.argv[2], 'wb') as file: - res is not None and {pickling_library}.dump(res, file) - """).format(load_args_code=load_args_line, - python_callable_lines=dedent(inspect.getsource(self.python_callable)), - python_callable_name=self.python_callable.__name__, - pickling_library=pickling_library) - def get_current_context() -> Dict[str, Any]: """ diff --git a/airflow/operators/sql.py b/airflow/operators/sql.py index ee2d5475dc27b..1c29349f5f370 100644 --- a/airflow/operators/sql.py +++ 
b/airflow/operators/sql.py @@ -152,12 +152,11 @@ class SQLValueCheckOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, sql: str, pass_value: Any, tolerance: Any = None, conn_id: Optional[str] = None, - *args, **kwargs, ): super().__init__(**kwargs) @@ -272,7 +271,7 @@ class SQLIntervalCheckOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, table: str, metrics_thresholds: Dict[str, int], date_filter_column: Optional[str] = "ds", @@ -280,7 +279,6 @@ def __init__( ratio_formula: Optional[str] = "max_over_min", ignore_zero: Optional[bool] = True, conn_id: Optional[str] = None, - *args, **kwargs, ): super().__init__(**kwargs) @@ -417,12 +415,11 @@ class SQLThresholdCheckOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, sql: str, min_threshold: Any, max_threshold: Any, conn_id: Optional[str] = None, - *args, **kwargs, ): super().__init__(**kwargs) @@ -510,14 +507,13 @@ class BranchSQLOperator(BaseOperator, SkipMixin): @apply_defaults def __init__( - self, + self, *, sql: str, follow_task_ids_if_true: List[str], follow_task_ids_if_false: List[str], conn_id: str = "default_conn_id", database: Optional[str] = None, parameters: Optional[Union[Mapping, Iterable]] = None, - *args, **kwargs, ) -> None: super().__init__(**kwargs) diff --git a/airflow/providers/amazon/aws/example_dags/example_datasync_1.py b/airflow/providers/amazon/aws/example_dags/example_datasync_1.py index 9f0a5fa4fe5c6..5e1127aa84684 100644 --- a/airflow/providers/amazon/aws/example_dags/example_datasync_1.py +++ b/airflow/providers/amazon/aws/example_dags/example_datasync_1.py @@ -45,12 +45,11 @@ "DESTINATION_LOCATION_URI", "s3://mybucket/prefix") # [END howto_operator_datasync_1_args_2] -default_args = {"start_date": days_ago(1)} with models.DAG( "example_datasync_1_1", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: @@ -64,7 +63,7 @@ with models.DAG( 
"example_datasync_1_2", - default_args=default_args, + start_date=days_ago(1), schedule_interval=None, # Override to match your needs ) as dag: # [START howto_operator_datasync_1_2] diff --git a/airflow/providers/amazon/aws/example_dags/example_datasync_2.py b/airflow/providers/amazon/aws/example_dags/example_datasync_2.py index b84a00e35a010..c6b8e0eb51917 100644 --- a/airflow/providers/amazon/aws/example_dags/example_datasync_2.py +++ b/airflow/providers/amazon/aws/example_dags/example_datasync_2.py @@ -77,13 +77,12 @@ getenv("UPDATE_TASK_KWARGS", default_update_task_kwargs) ) -default_args = {"start_date": days_ago(1)} # [END howto_operator_datasync_2_args] with models.DAG( "example_datasync_2", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: diff --git a/airflow/providers/amazon/aws/example_dags/example_ecs_fargate.py b/airflow/providers/amazon/aws/example_dags/example_ecs_fargate.py index 4c75d8f602352..94cecba7f9269 100644 --- a/airflow/providers/amazon/aws/example_dags/example_ecs_fargate.py +++ b/airflow/providers/amazon/aws/example_dags/example_ecs_fargate.py @@ -31,7 +31,6 @@ DEFAULT_ARGS = { "owner": "airflow", "depends_on_past": False, - "start_date": datetime.datetime(2020, 1, 1), "email": ["airflow@example.com"], "email_on_failure": False, "email_on_retry": False, @@ -42,6 +41,7 @@ default_args=DEFAULT_ARGS, default_view="graph", schedule_interval=None, + start_date=datetime.datetime(2020, 1, 1), tags=["example"], ) # generate dag documentation diff --git a/airflow/providers/amazon/aws/example_dags/example_emr_job_flow_automatic_steps.py b/airflow/providers/amazon/aws/example_dags/example_emr_job_flow_automatic_steps.py index c2141dca8dfff..3c52ffca58f07 100644 --- a/airflow/providers/amazon/aws/example_dags/example_emr_job_flow_automatic_steps.py +++ b/airflow/providers/amazon/aws/example_dags/example_emr_job_flow_automatic_steps.py @@ -28,7 +28,6 @@ 
DEFAULT_ARGS = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': days_ago(2), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False @@ -76,6 +75,7 @@ dag_id='emr_job_flow_automatic_steps_dag', default_args=DEFAULT_ARGS, dagrun_timeout=timedelta(hours=2), + start_date=days_ago(2), schedule_interval='0 3 * * *', tags=['example'], ) as dag: diff --git a/airflow/providers/amazon/aws/example_dags/example_emr_job_flow_manual_steps.py b/airflow/providers/amazon/aws/example_dags/example_emr_job_flow_manual_steps.py index b5d9882e5708f..0b73bd3366110 100644 --- a/airflow/providers/amazon/aws/example_dags/example_emr_job_flow_manual_steps.py +++ b/airflow/providers/amazon/aws/example_dags/example_emr_job_flow_manual_steps.py @@ -33,7 +33,6 @@ DEFAULT_ARGS = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': days_ago(2), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False @@ -78,6 +77,7 @@ dag_id='emr_job_flow_manual_steps_dag', default_args=DEFAULT_ARGS, dagrun_timeout=timedelta(hours=2), + start_date=days_ago(2), schedule_interval='0 3 * * *', tags=['example'], ) as dag: diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py index 057f29b115b95..f05c5ae225cde 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py +++ b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py @@ -51,8 +51,6 @@ YOUTUBE_VIDEO_FIELDS = getenv("YOUTUBE_VIDEO_FIELDS", "items(id,snippet(description,publishedAt,tags,title))") # [END howto_operator_google_api_to_s3_transfer_advanced_env_variables] -default_args = {"start_date": days_ago(1)} - # pylint: disable=unused-argument # [START howto_operator_google_api_to_s3_transfer_advanced_task_1_2] @@ -74,8 +72,8 @@ def 
_check_and_transform_video_ids(xcom_key, task_ids, task_instance, **kwargs): with DAG( dag_id="example_google_api_to_s3_transfer_advanced", - default_args=default_args, schedule_interval=None, + start_date=days_ago(1), tags=['example'] ) as dag: # [START howto_operator_google_api_to_s3_transfer_advanced_task_1] diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py index 07290ca4c2993..515a9661bb224 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py +++ b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py @@ -32,12 +32,11 @@ S3_DESTINATION_KEY = getenv("S3_DESTINATION_KEY", "s3://bucket/key.json") # [END howto_operator_google_api_to_s3_transfer_basic_env_variables] -default_args = {"start_date": days_ago(1)} with DAG( dag_id="example_google_api_to_s3_transfer_basic", - default_args=default_args, schedule_interval=None, + start_date=days_ago(1), tags=['example'] ) as dag: # [START howto_operator_google_api_to_s3_transfer_basic_task_1] diff --git a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py index 636d360f4acb5..0c308ba1360da 100644 --- a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py +++ b/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py @@ -33,11 +33,9 @@ S3_DESTINATION_KEY = getenv("S3_DESTINATION_KEY", "s3://bucket/key.json") # [END howto_operator_imap_attachment_to_s3_env_variables] -default_args = {"start_date": days_ago(1)} - with DAG( dag_id="example_imap_attachment_to_s3", - default_args=default_args, + start_date=days_ago(1), schedule_interval=None, tags=['example'] ) as dag: diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py 
b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py index 5086621c052ab..2ffccbc593736 100644 --- a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py +++ b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py @@ -34,8 +34,6 @@ REDSHIFT_TABLE = getenv("REDSHIFT_TABLE", "test_table") # [END howto_operator_s3_to_redshift_env_variables] -default_args = {"start_date": days_ago(1)} - def _add_sample_data_to_s3(): s3_hook = S3Hook() @@ -50,7 +48,7 @@ def _remove_sample_data_from_s3(): with DAG( dag_id="example_s3_to_redshift", - default_args=default_args, + start_date=days_ago(1), schedule_interval=None, tags=['example'] ) as dag: diff --git a/airflow/providers/amazon/aws/hooks/athena.py b/airflow/providers/amazon/aws/hooks/athena.py index 4bd33d34f2ca7..830bb8f3e577c 100644 --- a/airflow/providers/amazon/aws/hooks/athena.py +++ b/airflow/providers/amazon/aws/hooks/athena.py @@ -20,6 +20,9 @@ This module contains AWS Athena hook """ from time import sleep +from typing import Any, Dict, Optional + +from botocore.paginate import PageIterator from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook @@ -42,12 +45,18 @@ class AWSAthenaHook(AwsBaseHook): FAILURE_STATES = ('FAILED', 'CANCELLED',) SUCCESS_STATES = ('SUCCEEDED',) - def __init__(self, *args, sleep_time=30, **kwargs): - super().__init__(client_type='athena', *args, **kwargs) + def __init__(self, + *args: Any, + sleep_time: int = 30, + **kwargs: Any) -> None: + super().__init__(client_type='athena', *args, **kwargs) # type: ignore self.sleep_time = sleep_time - def run_query(self, query, query_context, result_configuration, client_request_token=None, - workgroup='primary'): + def run_query(self, query: str, + query_context: Dict[str, str], + result_configuration: Dict[str, Any], + client_request_token: Optional[str] = None, + workgroup: str = 'primary') -> str: """ Run Presto query on athena with provided config and return submitted query_execution_id @@ 
-75,7 +84,7 @@ def run_query(self, query, query_context, result_configuration, client_request_t query_execution_id = response['QueryExecutionId'] return query_execution_id - def check_query_status(self, query_execution_id): + def check_query_status(self, query_execution_id: str) -> Optional[str]: """ Fetch the status of submitted athena query. Returns None or one of valid query states. @@ -94,7 +103,7 @@ def check_query_status(self, query_execution_id): # The error is being absorbed to implement retries. return state # pylint: disable=lost-exception - def get_state_change_reason(self, query_execution_id): + def get_state_change_reason(self, query_execution_id: str) -> Optional[str]: """ Fetch the reason for a state change (e.g. error message). Returns None or reason string. @@ -113,7 +122,9 @@ def get_state_change_reason(self, query_execution_id): # The error is being absorbed to implement retries. return reason # pylint: disable=lost-exception - def get_query_results(self, query_execution_id, next_token_id=None, max_results=1000): + def get_query_results(self, query_execution_id: str, + next_token_id: Optional[str] = None, + max_results: int = 1000) -> Optional[dict]: """ Fetch submitted athena query results. returns none if query is in intermediate state or failed/cancelled state else dict of query output @@ -141,8 +152,11 @@ def get_query_results(self, query_execution_id, next_token_id=None, max_results= result_params['NextToken'] = next_token_id return self.get_conn().get_query_results(**result_params) - def get_query_results_paginator(self, query_execution_id, max_items=None, - page_size=None, starting_token=None): + def get_query_results_paginator(self, query_execution_id: str, + max_items: Optional[int] = None, + page_size: Optional[int] = None, + starting_token: Optional[str] = None + ) -> Optional[PageIterator]: """ Fetch submitted athena query results. 
returns none if query is in intermediate state or failed/cancelled state else a paginator to iterate through pages of results. If you @@ -177,7 +191,8 @@ def get_query_results_paginator(self, query_execution_id, max_items=None, paginator = self.get_conn().get_paginator('get_query_results') return paginator.paginate(**result_params) - def poll_query_status(self, query_execution_id, max_tries=None): + def poll_query_status(self, query_execution_id: str, + max_tries: Optional[int] = None) -> Optional[str]: """ Poll the status of submitted athena query until query state reaches final state. Returns one of the final states @@ -211,7 +226,7 @@ def poll_query_status(self, query_execution_id, max_tries=None): sleep(self.sleep_time) return final_query_state - def stop_query(self, query_execution_id): + def stop_query(self, query_execution_id: str) -> Dict: """ Cancel the submitted athena query diff --git a/airflow/providers/amazon/aws/hooks/emr.py b/airflow/providers/amazon/aws/hooks/emr.py index 001374e86410e..c3364e75ad4f1 100644 --- a/airflow/providers/amazon/aws/hooks/emr.py +++ b/airflow/providers/amazon/aws/hooks/emr.py @@ -16,6 +16,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any, Dict, List, Optional + from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook @@ -36,7 +38,11 @@ def __init__(self, emr_conn_id=None, *args, **kwargs): self.emr_conn_id = emr_conn_id super().__init__(client_type='emr', *args, **kwargs) - def get_cluster_id_by_name(self, emr_cluster_name, cluster_states): + def get_cluster_id_by_name( + self, + emr_cluster_name: str, + cluster_states: List[str] + ) -> Optional[str]: """ Fetch id of EMR cluster with given name and (optional) states. Will return only if single id is found. 
@@ -65,7 +71,7 @@ def get_cluster_id_by_name(self, emr_cluster_name, cluster_states): self.log.info('No cluster found for name %s', emr_cluster_name) return None - def create_job_flow(self, job_flow_overrides): + def create_job_flow(self, job_flow_overrides: Dict[str, Any]) -> Dict[str, Any]: """ Creates a job flow using the config from the EMR connection. Keys of the json extra hash may have the arguments of the boto3 diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py index 3e4b88b1534e3..710a4bc123f5d 100644 --- a/airflow/providers/amazon/aws/hooks/s3.py +++ b/airflow/providers/amazon/aws/hooks/s3.py @@ -27,10 +27,12 @@ import shutil from functools import wraps from inspect import signature +from io import BytesIO from tempfile import NamedTemporaryFile -from typing import Callable, Optional, TypeVar, cast +from typing import Any, Callable, Dict, Optional, Tuple, TypeVar, Union, cast from urllib.parse import urlparse +from boto3.s3.transfer import S3Transfer from botocore.exceptions import ClientError from airflow.exceptions import AirflowException @@ -49,7 +51,7 @@ def provide_bucket_name(func: T) -> T: function_signature = signature(func) @wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwargs) -> T: bound_args = function_signature.bind(*args, **kwargs) if 'bucket_name' not in bound_args.arguments: @@ -73,10 +75,10 @@ def unify_bucket_name_and_key(func: T) -> T: function_signature = signature(func) @wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwargs) -> T: bound_args = function_signature.bind(*args, **kwargs) - def get_key_name(): + def get_key_name() -> Optional[str]: if 'wildcard_key' in bound_args.arguments: return 'wildcard_key' if 'key' in bound_args.arguments: @@ -108,7 +110,7 @@ def __init__(self, *args, **kwargs): super().__init__(client_type='s3', *args, **kwargs) @staticmethod - def parse_s3_url(s3url): + def parse_s3_url(s3url: str) -> Tuple[str, str]: """ Parses 
the S3 Url into a bucket name and key. @@ -128,7 +130,7 @@ def parse_s3_url(s3url): return bucket_name, key @provide_bucket_name - def check_for_bucket(self, bucket_name=None): + def check_for_bucket(self, bucket_name: Optional[str] = None) -> bool: """ Check if bucket_name exists. @@ -145,7 +147,7 @@ def check_for_bucket(self, bucket_name=None): return False @provide_bucket_name - def get_bucket(self, bucket_name=None): + def get_bucket(self, bucket_name: Optional[str] = None) -> str: """ Returns a boto3.S3.Bucket object @@ -158,7 +160,9 @@ def get_bucket(self, bucket_name=None): return s3_resource.Bucket(bucket_name) @provide_bucket_name - def create_bucket(self, bucket_name=None, region_name=None): + def create_bucket(self, + bucket_name: Optional[str] = None, + region_name: Optional[str] = None) -> None: """ Creates an Amazon S3 bucket. @@ -178,7 +182,10 @@ def create_bucket(self, bucket_name=None, region_name=None): }) @provide_bucket_name - def check_for_prefix(self, prefix, delimiter, bucket_name=None): + def check_for_prefix(self, + prefix: str, + delimiter: str, + bucket_name: Optional[str] = None) -> bool: """ Checks that a prefix exists in a bucket @@ -198,8 +205,12 @@ def check_for_prefix(self, prefix, delimiter, bucket_name=None): return False if plist is None else prefix in plist @provide_bucket_name - def list_prefixes(self, bucket_name=None, prefix='', delimiter='', - page_size=None, max_items=None): + def list_prefixes(self, + bucket_name: Optional[str] = None, + prefix: Optional[str] = None, + delimiter: Optional[str] = None, + page_size: Optional[int] = None, + max_items: Optional[int] = None) -> Optional[list]: """ Lists prefixes in a bucket under prefix @@ -216,6 +227,8 @@ def list_prefixes(self, bucket_name=None, prefix='', delimiter='', :return: a list of matched prefixes and None if there are none. 
:rtype: list """ + prefix = prefix or '' + delimiter = delimiter or '' config = { 'PageSize': page_size, 'MaxItems': max_items, @@ -240,8 +253,12 @@ def list_prefixes(self, bucket_name=None, prefix='', delimiter='', return None @provide_bucket_name - def list_keys(self, bucket_name=None, prefix='', delimiter='', - page_size=None, max_items=None): + def list_keys(self, + bucket_name: Optional[str] = None, + prefix: Optional[str] = None, + delimiter: Optional[str] = None, + page_size: Optional[int] = None, + max_items: Optional[int] = None) -> Optional[list]: """ Lists keys in a bucket under prefix and not containing delimiter @@ -258,6 +275,8 @@ def list_keys(self, bucket_name=None, prefix='', delimiter='', :return: a list of matched keys and None if there are none. :rtype: list """ + prefix = prefix or '' + delimiter = delimiter or '' config = { 'PageSize': page_size, 'MaxItems': max_items, @@ -283,7 +302,7 @@ def list_keys(self, bucket_name=None, prefix='', delimiter='', @provide_bucket_name @unify_bucket_name_and_key - def check_for_key(self, key, bucket_name=None): + def check_for_key(self, key: str, bucket_name: Optional[str] = None) -> bool: """ Checks if a key exists in a bucket @@ -304,7 +323,7 @@ def check_for_key(self, key, bucket_name=None): @provide_bucket_name @unify_bucket_name_and_key - def get_key(self, key, bucket_name=None): + def get_key(self, key: str, bucket_name: Optional[str] = None) -> S3Transfer: """ Returns a boto3.s3.Object @@ -322,7 +341,7 @@ def get_key(self, key, bucket_name=None): @provide_bucket_name @unify_bucket_name_and_key - def read_key(self, key, bucket_name=None): + def read_key(self, key: str, bucket_name: Optional[str] = None) -> S3Transfer: """ Reads a key from S3 @@ -339,11 +358,13 @@ def read_key(self, key, bucket_name=None): @provide_bucket_name @unify_bucket_name_and_key - def select_key(self, key, bucket_name=None, - expression='SELECT * FROM S3Object', - expression_type='SQL', - input_serialization=None, - 
output_serialization=None): + def select_key(self, + key: str, + bucket_name: Optional[str] = None, + expression: Optional[str] = None, + expression_type: Optional[str] = None, + input_serialization: Optional[Dict[str, Any]] = None, + output_serialization: Optional[Dict[str, Any]] = None) -> str: """ Reads a key with S3 Select. @@ -366,6 +387,9 @@ def select_key(self, key, bucket_name=None, For more details about S3 Select parameters: http://boto3.readthedocs.io/en/latest/reference/services/s3.html#S3.Client.select_object_content """ + expression = expression or 'SELECT * FROM S3Object' + expression_type = expression_type or 'SQL' + if input_serialization is None: input_serialization = {'CSV': {}} if output_serialization is None: @@ -386,7 +410,9 @@ def select_key(self, key, bucket_name=None, @provide_bucket_name @unify_bucket_name_and_key def check_for_wildcard_key(self, - wildcard_key, bucket_name=None, delimiter=''): + wildcard_key: str, + bucket_name: Optional[str] = None, + delimiter: str = '') -> bool: """ Checks that a key matching a wildcard expression exists in a bucket @@ -405,7 +431,10 @@ def check_for_wildcard_key(self, @provide_bucket_name @unify_bucket_name_and_key - def get_wildcard_key(self, wildcard_key, bucket_name=None, delimiter=''): + def get_wildcard_key(self, + wildcard_key: str, + bucket_name: Optional[str] = None, + delimiter: str = '') -> S3Transfer: """ Returns a boto3.s3.Object object matching the wildcard expression @@ -430,13 +459,13 @@ def get_wildcard_key(self, wildcard_key, bucket_name=None, delimiter=''): @provide_bucket_name @unify_bucket_name_and_key def load_file(self, - filename, - key, - bucket_name=None, - replace=False, - encrypt=False, - gzip=False, - acl_policy=None): + filename: str, + key: str, + bucket_name: Optional[str] = None, + replace: bool = False, + encrypt: bool = False, + gzip: bool = False, + acl_policy: Optional[str] = None) -> None: """ Loads a local file to S3 @@ -482,13 +511,13 @@ def load_file(self, 
@provide_bucket_name @unify_bucket_name_and_key def load_string(self, - string_data, - key, - bucket_name=None, - replace=False, - encrypt=False, - encoding='utf-8', - acl_policy=None): + string_data: str, + key: str, + bucket_name: Optional[str] = None, + replace: bool = False, + encrypt: bool = False, + encoding: Optional[str] = None, + acl_policy: Optional[str] = None) -> None: """ Loads a string to S3 @@ -513,6 +542,8 @@ def load_string(self, object to be uploaded :type acl_policy: str """ + encoding = encoding or 'utf-8' + bytes_data = string_data.encode(encoding) file_obj = io.BytesIO(bytes_data) self._upload_file_obj(file_obj, key, bucket_name, replace, encrypt, acl_policy) @@ -521,12 +552,12 @@ def load_string(self, @provide_bucket_name @unify_bucket_name_and_key def load_bytes(self, - bytes_data, - key, - bucket_name=None, - replace=False, - encrypt=False, - acl_policy=None): + bytes_data: bytes, + key: str, + bucket_name: Optional[str] = None, + replace: bool = False, + encrypt: bool = False, + acl_policy: Optional[str] = None) -> None: """ Loads bytes to S3 @@ -556,12 +587,12 @@ def load_bytes(self, @provide_bucket_name @unify_bucket_name_and_key def load_file_obj(self, - file_obj, - key, - bucket_name=None, - replace=False, - encrypt=False, - acl_policy=None): + file_obj: BytesIO, + key: str, + bucket_name: Optional[str] = None, + replace: bool = False, + encrypt: bool = False, + acl_policy: Optional[str] = None) -> None: """ Loads a file object to S3 @@ -584,12 +615,12 @@ def load_file_obj(self, self._upload_file_obj(file_obj, key, bucket_name, replace, encrypt, acl_policy) def _upload_file_obj(self, - file_obj, - key, - bucket_name=None, - replace=False, - encrypt=False, - acl_policy=None): + file_obj: BytesIO, + key: str, + bucket_name: Optional[str] = None, + replace: bool = False, + encrypt: bool = False, + acl_policy: Optional[str] = None) -> None: if not replace and self.check_for_key(key, bucket_name): raise ValueError("The key {key} already 
exists.".format(key=key)) @@ -603,12 +634,12 @@ def _upload_file_obj(self, client.upload_fileobj(file_obj, bucket_name, key, ExtraArgs=extra_args) def copy_object(self, - source_bucket_key, - dest_bucket_key, - source_bucket_name=None, - dest_bucket_name=None, - source_version_id=None, - acl_policy='private'): + source_bucket_key: str, + dest_bucket_key: str, + source_bucket_name: Optional[str] = None, + dest_bucket_name: Optional[str] = None, + source_version_id: Optional[str] = None, + acl_policy: Optional[str] = None) -> None: """ Creates a copy of an object that is already stored in S3. @@ -640,6 +671,7 @@ def copy_object(self, object to be copied which is private by default. :type acl_policy: str """ + acl_policy = acl_policy or 'private' if dest_bucket_name is None: dest_bucket_name, dest_bucket_key = self.parse_s3_url(dest_bucket_key) @@ -688,7 +720,7 @@ def delete_bucket(self, bucket_name: str, force_delete: bool = False) -> None: Bucket=bucket_name ) - def delete_objects(self, bucket, keys): + def delete_objects(self, bucket: str, keys: Union[str, list]) -> None: """ Delete keys from the bucket. @@ -724,12 +756,10 @@ def delete_objects(self, bucket, keys): @provide_bucket_name @unify_bucket_name_and_key - def download_file( - self, - key: str, - bucket_name: Optional[str] = None, - local_path: Optional[str] = None - ) -> str: + def download_file(self, + key: str, + bucket_name: Optional[str] = None, + local_path: Optional[str] = None) -> str: """ Downloads a file from the S3 location to the local file system. 
@@ -755,7 +785,11 @@ def download_file( return local_tmp_file.name - def generate_presigned_url(self, client_method, params=None, expires_in=3600, http_method=None): + def generate_presigned_url(self, + client_method: str, + params: Optional[dict] = None, + expires_in: int = 3600, + http_method: Optional[str] = None) -> Optional[str]: """ Generate a presigned url given a client, its method, and arguments diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py index d46d9d3e6245d..8aefa29cb8e3b 100644 --- a/airflow/providers/amazon/aws/hooks/sagemaker.py +++ b/airflow/providers/amazon/aws/hooks/sagemaker.py @@ -123,7 +123,7 @@ def secondary_training_status_message(job_description, prev_description): return '\n'.join(status_strs) -class SageMakerHook(AwsBaseHook): +class SageMakerHook(AwsBaseHook): # pylint: disable=too-many-public-methods """ Interact with Amazon SageMaker. @@ -400,6 +400,34 @@ def create_transform_job(self, config, wait_for_completion=True, ) return response + def create_processing_job(self, config, wait_for_completion=True, + check_interval=30, max_ingestion_time=None): + """ + Create a processing job + + :param config: the config for processing job + :type config: dict + :param wait_for_completion: if the program should keep running until job finishes + :type wait_for_completion: bool + :param check_interval: the time interval in seconds which the operator + will check the status of any SageMaker job + :type check_interval: int + :param max_ingestion_time: the maximum ingestion time in seconds. Any + SageMaker jobs that run longer than this will fail. Setting this to + None implies no timeout for any SageMaker job. 
+ :type max_ingestion_time: int + :return: A response to processing job creation + """ + + response = self.get_conn().create_processing_job(**config) + if wait_for_completion: + self.check_status(config['ProcessingJobName'], + 'ProcessingJobStatus', + self.describe_processing_job, + check_interval, max_ingestion_time + ) + return response + def create_model(self, config): """ Create a model job @@ -579,6 +607,17 @@ def describe_transform_job(self, name): return self.get_conn().describe_transform_job(TransformJobName=name) + def describe_processing_job(self, name): + """ + Return the processing job info associated with the name + + :param name: the name of the processing job + :type name: str + :return: A dict containing all the processing job info + """ + + return self.get_conn().describe_processing_job(ProcessingJobName=name) + def describe_endpoint_config(self, name): """ Return the endpoint config info associated with the name @@ -786,6 +825,28 @@ def list_training_jobs( ) return results + def list_processing_jobs(self, **kwargs) -> List[Dict]: # noqa: D402 + """ + This method wraps boto3's list_processing_jobs(). All arguments should be provided via kwargs. + Note boto3 expects these in CamelCase format, for example: + + .. code-block:: python + + list_processing_jobs(NameContains="myjob", StatusEquals="Failed") + + ..
seealso:: + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.list_processing_jobs + + :param kwargs: (optional) kwargs to boto3's list_processing_jobs method + :return: results of the list_processing_jobs request + """ + + list_processing_jobs_request = partial(self.get_conn().list_processing_jobs, **kwargs) + results = self._list_request( + list_processing_jobs_request, "ProcessingJobSummaries", max_results=kwargs.get("MaxResults") + ) + return results + def _list_request(self, partial_func, result_key: str, max_results: Optional[int] = None) -> List[Dict]: """ All AWS boto3 list_* requests return results in batches (if the key "NextToken" is contained in the diff --git a/airflow/providers/amazon/aws/operators/athena.py b/airflow/providers/amazon/aws/operators/athena.py index 3294588bc8c48..4d734d0097ed5 100644 --- a/airflow/providers/amazon/aws/operators/athena.py +++ b/airflow/providers/amazon/aws/operators/athena.py @@ -16,9 +16,11 @@ # specific language governing permissions and limitations # under the License.
# - +from typing import Any, Dict, Optional from uuid import uuid4 +from cached_property import cached_property + from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.athena import AWSAthenaHook from airflow.utils.decorators import apply_defaults @@ -36,10 +38,18 @@ class AWSAthenaOperator(BaseOperator): :type output_location: str :param aws_conn_id: aws connection to use :type aws_conn_id: str + :param client_request_token: Unique token created by user to avoid multiple executions of same query + :type client_request_token: str + :param workgroup: Athena workgroup in which query will be run + :type workgroup: str + :param query_execution_context: Context in which query need to be run + :type query_execution_context: dict + :param result_configuration: Dict with path to store results in and config related to encryption + :type result_configuration: dict :param sleep_time: Time to wait between two consecutive call to check query status on athena :type sleep_time: int :param max_tries: Number of times to poll for query state before function exits - :type max_triex: int + :type max_tries: int """ ui_color = '#44b5e2' @@ -48,21 +58,20 @@ class AWSAthenaOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, - query, - database, - output_location, - aws_conn_id="aws_default", - client_request_token=None, - workgroup="primary", - query_execution_context=None, - result_configuration=None, - sleep_time=30, - max_tries=None, - *args, - **kwargs - ): - super().__init__(*args, **kwargs) + self, *, + query: str, + database: str, + output_location: str, + aws_conn_id: str = "aws_default", + client_request_token: Optional[str] = None, + workgroup: str = "primary", + query_execution_context: Optional[Dict[str, str]] = None, + result_configuration: Optional[Dict[str, Any]] = None, + sleep_time: int = 30, + max_tries: Optional[int] = None, + **kwargs: Any + ) -> None: + super().__init__(**kwargs) self.query = 
query self.database = database self.output_location = output_location @@ -73,19 +82,17 @@ def __init__( # pylint: disable=too-many-arguments self.result_configuration = result_configuration or {} self.sleep_time = sleep_time self.max_tries = max_tries - self.query_execution_id = None - self.hook = None + self.query_execution_id = None # type: Optional[str] - def get_hook(self): + @cached_property + def hook(self) -> AWSAthenaHook: """Create and return an AWSAthenaHook.""" return AWSAthenaHook(self.aws_conn_id, sleep_time=self.sleep_time) - def execute(self, context): + def execute(self, context: dict) -> Optional[str]: """ Run Presto Query on Athena """ - self.hook = self.get_hook() - self.query_execution_context['Database'] = self.database self.result_configuration['OutputLocation'] = self.output_location self.query_execution_id = self.hook.run_query(self.query, self.query_execution_context, @@ -106,7 +113,7 @@ def execute(self, context): return self.query_execution_id - def on_kill(self): + def on_kill(self) -> None: """ Cancel the submitted athena query """ diff --git a/airflow/providers/amazon/aws/operators/batch.py b/airflow/providers/amazon/aws/operators/batch.py index 7885aac0a0a6d..c865ade217170 100644 --- a/airflow/providers/amazon/aws/operators/batch.py +++ b/airflow/providers/amazon/aws/operators/batch.py @@ -99,7 +99,7 @@ class AwsBatchOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, job_name, job_definition, job_queue, diff --git a/airflow/providers/amazon/aws/operators/cloud_formation.py b/airflow/providers/amazon/aws/operators/cloud_formation.py index f6eadae9ec298..f0dc0c4b17cf0 100644 --- a/airflow/providers/amazon/aws/operators/cloud_formation.py +++ b/airflow/providers/amazon/aws/operators/cloud_formation.py @@ -45,12 +45,12 @@ class CloudFormationCreateStackOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, stack_name, params, aws_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, 
**kwargs) + **kwargs): + super().__init__(**kwargs) self.stack_name = stack_name self.params = params self.aws_conn_id = aws_conn_id @@ -83,12 +83,12 @@ class CloudFormationDeleteStackOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, stack_name, params=None, aws_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.params = params or {} self.stack_name = stack_name self.params = params diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py index 1bc3b9a06336b..5606614cd740c 100644 --- a/airflow/providers/amazon/aws/operators/datasync.py +++ b/airflow/providers/amazon/aws/operators/datasync.py @@ -107,7 +107,7 @@ class AWSDataSyncOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, aws_conn_id="aws_default", wait_interval_seconds=5, task_arn=None, @@ -121,10 +121,9 @@ def __init__( update_task_kwargs=None, task_execution_kwargs=None, delete_task_after_execution=False, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) # Assignments self.aws_conn_id = aws_conn_id diff --git a/airflow/providers/amazon/aws/operators/ec2_start_instance.py b/airflow/providers/amazon/aws/operators/ec2_start_instance.py index b3ca97be0198a..dc657bf3c9ddf 100644 --- a/airflow/providers/amazon/aws/operators/ec2_start_instance.py +++ b/airflow/providers/amazon/aws/operators/ec2_start_instance.py @@ -44,14 +44,13 @@ class EC2StartInstanceOperator(BaseOperator): ui_fgcolor = "#ffffff" @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, aws_conn_id: str = "aws_default", region_name: Optional[str] = None, check_interval: float = 15, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.instance_id = instance_id self.aws_conn_id = aws_conn_id self.region_name = region_name diff --git 
a/airflow/providers/amazon/aws/operators/ec2_stop_instance.py b/airflow/providers/amazon/aws/operators/ec2_stop_instance.py index 1b05847109c23..808284497a92c 100644 --- a/airflow/providers/amazon/aws/operators/ec2_stop_instance.py +++ b/airflow/providers/amazon/aws/operators/ec2_stop_instance.py @@ -44,14 +44,13 @@ class EC2StopInstanceOperator(BaseOperator): ui_fgcolor = "#ffffff" @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, aws_conn_id: str = "aws_default", region_name: Optional[str] = None, check_interval: float = 15, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.instance_id = instance_id self.aws_conn_id = aws_conn_id self.region_name = region_name diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py index 07dd0b28d3548..573b10d17d158 100644 --- a/airflow/providers/amazon/aws/operators/ecs.py +++ b/airflow/providers/amazon/aws/operators/ecs.py @@ -113,7 +113,7 @@ class ECSOperator(BaseOperator): # pylint: disable=too-many-instance-attributes template_fields = ('overrides',) @apply_defaults - def __init__(self, task_definition, cluster, overrides, # pylint: disable=too-many-arguments + def __init__(self, *, task_definition, cluster, overrides, # pylint: disable=too-many-arguments aws_conn_id=None, region_name=None, launch_type='EC2', group=None, placement_constraints=None, platform_version='LATEST', network_configuration=None, tags=None, awslogs_group=None, diff --git a/airflow/providers/amazon/aws/operators/emr_add_steps.py b/airflow/providers/amazon/aws/operators/emr_add_steps.py index 144de96de3362..dbed7a02cc95d 100644 --- a/airflow/providers/amazon/aws/operators/emr_add_steps.py +++ b/airflow/providers/amazon/aws/operators/emr_add_steps.py @@ -16,6 +16,7 @@ # specific language governing permissions and limitations # under the License. 
import ast +from typing import Any, Dict, List, Optional, Union from airflow.exceptions import AirflowException from airflow.models import BaseOperator @@ -51,17 +52,18 @@ class EmrAddStepsOperator(BaseOperator): @apply_defaults def __init__( self, - job_flow_id=None, - job_flow_name=None, - cluster_states=None, - aws_conn_id='aws_default', - steps=None, + job_flow_id: Optional[str] = None, + job_flow_name: Optional[str] = None, + cluster_states: Optional[List[str]] = None, + aws_conn_id: str = 'aws_default', + steps: Optional[Union[List[dict], str]] = None, *args, **kwargs): if kwargs.get('xcom_push') is not None: raise AirflowException("'xcom_push' was deprecated, use 'do_xcom_push' instead") if not (job_flow_id is None) ^ (job_flow_name is None): raise AirflowException('Exactly one of job_flow_id or job_flow_name must be specified.') super().__init__(*args, **kwargs) + cluster_states = cluster_states or [] steps = steps or [] self.aws_conn_id = aws_conn_id self.job_flow_id = job_flow_id @@ -69,13 +71,14 @@ def __init__( self.cluster_states = cluster_states self.steps = steps - def execute(self, context): + def execute(self, context: Dict[str, Any]) -> List[str]: emr_hook = EmrHook(aws_conn_id=self.aws_conn_id) emr = emr_hook.get_conn() - job_flow_id = self.job_flow_id or emr_hook.get_cluster_id_by_name(self.job_flow_name, - self.cluster_states) + job_flow_id = self.job_flow_id or \ + emr_hook.get_cluster_id_by_name(str(self.job_flow_name), self.cluster_states) + if not job_flow_id: raise AirflowException(f'No cluster found for name: {self.job_flow_name}') diff --git a/airflow/providers/amazon/aws/operators/emr_create_job_flow.py b/airflow/providers/amazon/aws/operators/emr_create_job_flow.py index 8ecee355c0c9d..3ecbbbacca4f4 100644 --- a/airflow/providers/amazon/aws/operators/emr_create_job_flow.py +++ b/airflow/providers/amazon/aws/operators/emr_create_job_flow.py @@ -16,6 +16,7 @@ # specific language governing permissions and limitations # under the License. 
import ast +from typing import Any, Dict, Optional, Union from airflow.exceptions import AirflowException from airflow.models import BaseOperator @@ -44,10 +45,10 @@ class EmrCreateJobFlowOperator(BaseOperator): @apply_defaults def __init__( self, - aws_conn_id='aws_default', - emr_conn_id='emr_default', - job_flow_overrides=None, - region_name=None, + aws_conn_id: str = 'aws_default', + emr_conn_id: str = 'emr_default', + job_flow_overrides: Optional[Union[str, Dict[str, Any]]] = None, + region_name: Optional[str] = None, *args, **kwargs): super().__init__(*args, **kwargs) self.aws_conn_id = aws_conn_id @@ -57,7 +58,7 @@ def __init__( self.job_flow_overrides = job_flow_overrides self.region_name = region_name - def execute(self, context): + def execute(self, context: Dict[str, Any]) -> str: emr = EmrHook(aws_conn_id=self.aws_conn_id, emr_conn_id=self.emr_conn_id, region_name=self.region_name) @@ -68,9 +69,11 @@ def execute(self, context): ) if isinstance(self.job_flow_overrides, str): - self.job_flow_overrides = ast.literal_eval(self.job_flow_overrides) - - response = emr.create_job_flow(self.job_flow_overrides) + job_flow_overrides: Dict[str, Any] = ast.literal_eval(self.job_flow_overrides) + self.job_flow_overrides = job_flow_overrides + else: + job_flow_overrides = self.job_flow_overrides + response = emr.create_job_flow(job_flow_overrides) if not response['ResponseMetadata']['HTTPStatusCode'] == 200: raise AirflowException('JobFlow creation failed: %s' % response) diff --git a/airflow/providers/amazon/aws/operators/emr_modify_cluster.py b/airflow/providers/amazon/aws/operators/emr_modify_cluster.py index bbe163d497670..a9631797da106 100644 --- a/airflow/providers/amazon/aws/operators/emr_modify_cluster.py +++ b/airflow/providers/amazon/aws/operators/emr_modify_cluster.py @@ -15,6 +15,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ +from typing import Any, Dict + from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.emr import EmrHook @@ -39,19 +42,19 @@ class EmrModifyClusterOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, cluster_id: str, step_concurrency_level: int, aws_conn_id: str = 'aws_default', - *args, **kwargs): + **kwargs): if kwargs.get('xcom_push') is not None: raise AirflowException("'xcom_push' was deprecated, use 'do_xcom_push' instead") - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.aws_conn_id = aws_conn_id self.cluster_id = cluster_id self.step_concurrency_level = step_concurrency_level - def execute(self, context): + def execute(self, context: Dict[str, Any]) -> int: emr_hook = EmrHook(aws_conn_id=self.aws_conn_id) emr = emr_hook.get_conn() diff --git a/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py b/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py index 4599f73cc103a..8cf273149a00e 100644 --- a/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py +++ b/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py @@ -15,6 +15,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ +from typing import Any, Dict + from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.emr import EmrHook @@ -37,14 +40,14 @@ class EmrTerminateJobFlowOperator(BaseOperator): @apply_defaults def __init__( self, - job_flow_id, - aws_conn_id='aws_default', + job_flow_id: str, + aws_conn_id: str = 'aws_default', *args, **kwargs): super().__init__(*args, **kwargs) self.job_flow_id = job_flow_id self.aws_conn_id = aws_conn_id - def execute(self, context): + def execute(self, context: Dict[str, Any]) -> None: emr = EmrHook(aws_conn_id=self.aws_conn_id).get_conn() self.log.info('Terminating JobFlow %s', self.job_flow_id) diff --git a/airflow/providers/amazon/aws/operators/glue.py b/airflow/providers/amazon/aws/operators/glue.py index 67e845518c904..055a43d1413ba 100644 --- a/airflow/providers/amazon/aws/operators/glue.py +++ b/airflow/providers/amazon/aws/operators/glue.py @@ -57,7 +57,7 @@ class AwsGlueJobOperator(BaseOperator): ui_color = '#ededed' @apply_defaults - def __init__(self, + def __init__(self, *, job_name='aws_glue_default_job', job_desc='AWS Glue Job with Airflow', script_location=None, @@ -69,9 +69,9 @@ def __init__(self, region_name=None, s3_bucket=None, iam_role_name=None, - *args, **kwargs + **kwargs ): # pylint: disable=too-many-arguments - super(AwsGlueJobOperator, self).__init__(*args, **kwargs) + super(AwsGlueJobOperator, self).__init__(**kwargs) self.job_name = job_name self.job_desc = job_desc self.script_location = script_location diff --git a/airflow/providers/amazon/aws/operators/s3_bucket.py b/airflow/providers/amazon/aws/operators/s3_bucket.py index a740aba6e3363..f7d9822cf17a6 100644 --- a/airflow/providers/amazon/aws/operators/s3_bucket.py +++ b/airflow/providers/amazon/aws/operators/s3_bucket.py @@ -22,6 +22,7 @@ from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.s3 import S3Hook +from airflow.utils.decorators import apply_defaults class 
S3CreateBucketOperator(BaseOperator): @@ -39,13 +40,13 @@ class S3CreateBucketOperator(BaseOperator): :param region_name: AWS region_name. If not specified fetched from connection. :type region_name: Optional[str] """ - def __init__(self, + @apply_defaults + def __init__(self, *, bucket_name, aws_conn_id: Optional[str] = "aws_default", region_name: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket_name = bucket_name self.region_name = region_name self.aws_conn_id = aws_conn_id @@ -79,9 +80,8 @@ def __init__(self, bucket_name, force_delete: Optional[bool] = False, aws_conn_id: Optional[str] = "aws_default", - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket_name = bucket_name self.force_delete = force_delete self.aws_conn_id = aws_conn_id diff --git a/airflow/providers/amazon/aws/operators/s3_copy_object.py b/airflow/providers/amazon/aws/operators/s3_copy_object.py index 5d67d4e5f55f7..8d1dd9ca8c92a 100644 --- a/airflow/providers/amazon/aws/operators/s3_copy_object.py +++ b/airflow/providers/amazon/aws/operators/s3_copy_object.py @@ -69,7 +69,7 @@ class S3CopyObjectOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, source_bucket_key, dest_bucket_key, source_bucket_name=None, @@ -77,8 +77,8 @@ def __init__( source_version_id=None, aws_conn_id='aws_default', verify=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.source_bucket_key = source_bucket_key self.dest_bucket_key = dest_bucket_key diff --git a/airflow/providers/amazon/aws/operators/s3_delete_objects.py b/airflow/providers/amazon/aws/operators/s3_delete_objects.py index 25047fe121f04..d8c4683873acf 100644 --- a/airflow/providers/amazon/aws/operators/s3_delete_objects.py +++ b/airflow/providers/amazon/aws/operators/s3_delete_objects.py @@ -63,18 +63,18 @@ class S3DeleteObjectsOperator(BaseOperator): 
@apply_defaults def __init__( - self, + self, *, bucket, keys=None, prefix=None, aws_conn_id='aws_default', verify=None, - *args, **kwargs): + **kwargs): if not bool(keys) ^ bool(prefix): raise ValueError("Either keys or prefix should be set.") - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket = bucket self.keys = keys self.prefix = prefix diff --git a/airflow/providers/amazon/aws/operators/s3_file_transform.py b/airflow/providers/amazon/aws/operators/s3_file_transform.py index 4f0e626fc37e8..4324d204d31c2 100644 --- a/airflow/providers/amazon/aws/operators/s3_file_transform.py +++ b/airflow/providers/amazon/aws/operators/s3_file_transform.py @@ -84,7 +84,7 @@ class S3FileTransformOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, source_s3_key: str, dest_s3_key: str, transform_script: Optional[str] = None, @@ -95,9 +95,9 @@ def __init__( dest_aws_conn_id: str = 'aws_default', dest_verify: Optional[Union[bool, str]] = None, replace: bool = False, - *args, **kwargs) -> None: + **kwargs) -> None: # pylint: disable=too-many-arguments - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.source_s3_key = source_s3_key self.source_aws_conn_id = source_aws_conn_id self.source_verify = source_verify diff --git a/airflow/providers/amazon/aws/operators/s3_list.py b/airflow/providers/amazon/aws/operators/s3_list.py index 2406a22fb4598..427ff3fcaa929 100644 --- a/airflow/providers/amazon/aws/operators/s3_list.py +++ b/airflow/providers/amazon/aws/operators/s3_list.py @@ -69,15 +69,14 @@ class S3ListOperator(BaseOperator): ui_color = '#ffd700' @apply_defaults - def __init__(self, + def __init__(self, *, bucket, prefix='', delimiter='', aws_conn_id='aws_default', verify=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket = bucket self.prefix = prefix self.delimiter = delimiter diff --git a/airflow/providers/amazon/aws/operators/sagemaker_base.py 
b/airflow/providers/amazon/aws/operators/sagemaker_base.py index a4dd0255ccb17..e5c42ac947782 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker_base.py +++ b/airflow/providers/amazon/aws/operators/sagemaker_base.py @@ -41,11 +41,11 @@ class SageMakerBaseOperator(BaseOperator): integer_fields = [] # type: Iterable[Iterable[str]] @apply_defaults - def __init__(self, + def __init__(self, *, config, aws_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.aws_conn_id = aws_conn_id self.config = config diff --git a/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py b/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py index 9a0cd4d97022f..aa444fa5d8e54 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py +++ b/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py @@ -71,15 +71,15 @@ class SageMakerEndpointOperator(SageMakerBaseOperator): """ @apply_defaults - def __init__(self, + def __init__(self, *, config, wait_for_completion=True, check_interval=30, max_ingestion_time=None, operation='create', - *args, **kwargs): + **kwargs): super().__init__(config=config, - *args, **kwargs) + **kwargs) self.config = config self.wait_for_completion = wait_for_completion diff --git a/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py b/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py index 8a2a9eb3ea4f8..f1d38bf185325 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py +++ b/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py @@ -40,11 +40,11 @@ class SageMakerEndpointConfigOperator(SageMakerBaseOperator): ] @apply_defaults - def __init__(self, + def __init__(self, *, config, - *args, **kwargs): + **kwargs): super().__init__(config=config, - *args, **kwargs) + **kwargs) self.config = config diff --git a/airflow/providers/amazon/aws/operators/sagemaker_model.py 
b/airflow/providers/amazon/aws/operators/sagemaker_model.py index 19c5373e9a6b2..31e2fbd6ae496 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker_model.py +++ b/airflow/providers/amazon/aws/operators/sagemaker_model.py @@ -37,11 +37,11 @@ class SageMakerModelOperator(SageMakerBaseOperator): """ @apply_defaults - def __init__(self, + def __init__(self, *, config, - *args, **kwargs): + **kwargs): super().__init__(config=config, - *args, **kwargs) + **kwargs) self.config = config diff --git a/airflow/providers/amazon/aws/operators/sagemaker_processing.py b/airflow/providers/amazon/aws/operators/sagemaker_processing.py new file mode 100644 index 0000000000000..ef2fd6989a8a8 --- /dev/null +++ b/airflow/providers/amazon/aws/operators/sagemaker_processing.py @@ -0,0 +1,125 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from airflow.exceptions import AirflowException +from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook +from airflow.providers.amazon.aws.operators.sagemaker_base import SageMakerBaseOperator +from airflow.utils.decorators import apply_defaults + + +class SageMakerProcessingOperator(SageMakerBaseOperator): + """ + Initiate a SageMaker processing job. 
+
+    This operator returns the ARN of the processing job created in Amazon SageMaker.
+
+    :param config: The configuration necessary to start a processing job (templated).
+
+        For details of the configuration parameter see :py:meth:`SageMaker.Client.create_processing_job`
+    :type config: dict
+    :param aws_conn_id: The AWS connection ID to use.
+    :type aws_conn_id: str
+    :param wait_for_completion: Whether the operator should wait until the processing
+        job finishes before marking the task as done.
+    :type wait_for_completion: bool
+    :param print_log: if the operator should print the cloudwatch log during processing
+    :type print_log: bool
+    :param check_interval: if wait is set to be true, this is the time interval,
+        in seconds, at which the operator will check the status of the processing job
+    :type check_interval: int
+    :param max_ingestion_time: If wait is set to True, the operation fails if the processing job
+        doesn't finish within max_ingestion_time seconds. If you set this parameter to None,
+        the operation does not timeout.
+    :type max_ingestion_time: int
+    :param action_if_job_exists: Behaviour if the job name already exists. Possible options are "increment"
+        (default) and "fail".
+    :type action_if_job_exists: str
+    """
+
+    @apply_defaults
+    def __init__(self, *,
+                 config,
+                 aws_conn_id,
+                 wait_for_completion=True,
+                 print_log=True,
+                 check_interval=30,
+                 max_ingestion_time=None,
+                 action_if_job_exists: str = "increment",  # TODO use typing.Literal for this in Python 3.8
+                 **kwargs):
+        super().__init__(config=config, aws_conn_id=aws_conn_id, **kwargs)
+
+        if action_if_job_exists not in ("increment", "fail"):
+            raise AirflowException(
+                "Argument action_if_job_exists accepts only 'increment' and 'fail'. "
+                f"Provided value: '{action_if_job_exists}'."
+            )
+        self.action_if_job_exists = action_if_job_exists
+        self.wait_for_completion = wait_for_completion
+        self.print_log = print_log
+        self.check_interval = check_interval
+        self.max_ingestion_time = max_ingestion_time
+        self._create_integer_fields()
+
+    def _create_integer_fields(self):
+        """Set fields which should be cast to integers."""
+        self.integer_fields = [
+            ['ProcessingResources', 'ClusterConfig', 'InstanceCount'],
+            ['ProcessingResources', 'ClusterConfig', 'VolumeSizeInGB']
+        ]
+        if 'StoppingCondition' in self.config:
+            self.integer_fields += [
+                ['StoppingCondition', 'MaxRuntimeInSeconds']
+            ]
+
+    def expand_role(self):
+        if 'RoleArn' in self.config:
+            hook = AwsBaseHook(self.aws_conn_id, client_type='iam')
+            self.config['RoleArn'] = hook.expand_role(self.config['RoleArn'])
+
+    def execute(self, context):
+        self.preprocess_config()
+
+        processing_job_name = self.config["ProcessingJobName"]
+        processing_jobs = self.hook.list_processing_jobs(NameContains=processing_job_name)
+
+        # Check if given ProcessingJobName already exists
+        if processing_job_name in [pj["ProcessingJobName"] for pj in processing_jobs]:
+            if self.action_if_job_exists == "fail":
+                raise AirflowException(
+                    f"A SageMaker processing job with name {processing_job_name} already exists."
+ ) + if self.action_if_job_exists == "increment": + self.log.info("Found existing processing job with name '%s'.", processing_job_name) + new_processing_job_name = f"{processing_job_name}-{len(processing_jobs) + 1}" + self.config["ProcessingJobName"] = new_processing_job_name + self.log.info("Incremented processing job name to '%s'.", new_processing_job_name) + + self.log.info("Creating SageMaker processing job %s.", self.config["ProcessingJobName"]) + response = self.hook.create_processing_job( + self.config, + wait_for_completion=self.wait_for_completion, + check_interval=self.check_interval, + max_ingestion_time=self.max_ingestion_time + ) + if response['ResponseMetadata']['HTTPStatusCode'] != 200: + raise AirflowException('Sagemaker Processing Job creation failed: %s' % response) + return { + 'Processing': self.hook.describe_processing_job( + self.config['ProcessingJobName'] + ) + } diff --git a/airflow/providers/amazon/aws/operators/sagemaker_training.py b/airflow/providers/amazon/aws/operators/sagemaker_training.py index f15ea4c290585..9bdbe56e38efc 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker_training.py +++ b/airflow/providers/amazon/aws/operators/sagemaker_training.py @@ -58,7 +58,7 @@ class SageMakerTrainingOperator(SageMakerBaseOperator): ] @apply_defaults - def __init__(self, + def __init__(self, *, config, wait_for_completion=True, print_log=True, diff --git a/airflow/providers/amazon/aws/operators/sagemaker_transform.py b/airflow/providers/amazon/aws/operators/sagemaker_transform.py index 799ed2bca0921..221bf82aee0ab 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker_transform.py +++ b/airflow/providers/amazon/aws/operators/sagemaker_transform.py @@ -62,14 +62,14 @@ class SageMakerTransformOperator(SageMakerBaseOperator): """ @apply_defaults - def __init__(self, + def __init__(self, *, config, wait_for_completion=True, check_interval=30, max_ingestion_time=None, - *args, **kwargs): + **kwargs): 
super().__init__(config=config, - *args, **kwargs) + **kwargs) self.config = config self.wait_for_completion = wait_for_completion self.check_interval = check_interval diff --git a/airflow/providers/amazon/aws/operators/sagemaker_tuning.py b/airflow/providers/amazon/aws/operators/sagemaker_tuning.py index 3dcc20ce58a34..16268865f79f8 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker_tuning.py +++ b/airflow/providers/amazon/aws/operators/sagemaker_tuning.py @@ -55,14 +55,14 @@ class SageMakerTuningOperator(SageMakerBaseOperator): ] @apply_defaults - def __init__(self, + def __init__(self, *, config, wait_for_completion=True, check_interval=30, max_ingestion_time=None, - *args, **kwargs): + **kwargs): super().__init__(config=config, - *args, **kwargs) + **kwargs) self.config = config self.wait_for_completion = wait_for_completion self.check_interval = check_interval diff --git a/airflow/providers/amazon/aws/operators/sns.py b/airflow/providers/amazon/aws/operators/sns.py index 6988b186c711c..3f24813766600 100644 --- a/airflow/providers/amazon/aws/operators/sns.py +++ b/airflow/providers/amazon/aws/operators/sns.py @@ -44,14 +44,14 @@ class SnsPublishOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, target_arn, message, aws_conn_id='aws_default', subject=None, message_attributes=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.target_arn = target_arn self.message = message self.subject = subject diff --git a/airflow/providers/amazon/aws/operators/sqs.py b/airflow/providers/amazon/aws/operators/sqs.py index 1d9b1fcde4604..e0edc3fb857ab 100644 --- a/airflow/providers/amazon/aws/operators/sqs.py +++ b/airflow/providers/amazon/aws/operators/sqs.py @@ -42,15 +42,14 @@ class SQSPublishOperator(BaseOperator): ui_color = '#6ad3fa' @apply_defaults - def __init__(self, + def __init__(self, *, sqs_queue, message_content, message_attributes=None, delay_seconds=0, 
aws_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.sqs_queue = sqs_queue self.aws_conn_id = aws_conn_id self.message_content = message_content diff --git a/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py b/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py index 2ef531c782b3f..404ce2416f3de 100644 --- a/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py +++ b/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py @@ -41,8 +41,8 @@ class StepFunctionGetExecutionOutputOperator(BaseOperator): ui_color = '#f9c915' @apply_defaults - def __init__(self, execution_arn: str, aws_conn_id='aws_default', region_name=None, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, execution_arn: str, aws_conn_id='aws_default', region_name=None, **kwargs): + super().__init__(**kwargs) self.execution_arn = execution_arn self.aws_conn_id = aws_conn_id self.region_name = region_name diff --git a/airflow/providers/amazon/aws/operators/step_function_start_execution.py b/airflow/providers/amazon/aws/operators/step_function_start_execution.py index f5ea75ca3994d..0b22c88afef90 100644 --- a/airflow/providers/amazon/aws/operators/step_function_start_execution.py +++ b/airflow/providers/amazon/aws/operators/step_function_start_execution.py @@ -48,11 +48,11 @@ class StepFunctionStartExecutionOperator(BaseOperator): ui_color = '#f9c915' @apply_defaults - def __init__(self, state_machine_arn: str, name: Optional[str] = None, + def __init__(self, *, state_machine_arn: str, name: Optional[str] = None, state_machine_input: Union[dict, str, None] = None, aws_conn_id='aws_default', region_name=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.state_machine_arn = state_machine_arn self.name = name self.input = state_machine_input diff --git 
a/airflow/providers/amazon/aws/sensors/athena.py b/airflow/providers/amazon/aws/sensors/athena.py
index 76fbd64021e36..50edc8dd4e702 100644
--- a/airflow/providers/amazon/aws/sensors/athena.py
+++ b/airflow/providers/amazon/aws/sensors/athena.py
@@ -15,6 +15,9 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from typing import Any, Optional
+
+from cached_property import cached_property
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.athena import AWSAthenaHook
@@ -25,16 +28,16 @@ class AthenaSensor(BaseSensorOperator):
     """
     Asks for the state of the Query until it reaches a failure state or success state.
-    If it fails, failing the task.
+    If the query fails, the task will fail.
 
     :param query_execution_id: query_execution_id to check the state of
     :type query_execution_id: str
-    :param max_retires: Number of times to poll for query state before
+    :param max_retries: Number of times to poll for query state before
         returning the current state, defaults to None
-    :type max_retires: int
+    :type max_retries: int
     :param aws_conn_id: aws connection to use, defaults to 'aws_default'
     :type aws_conn_id: str
-    :param sleep_time: Time to wait between two consecutive call to
+    :param sleep_time: Time in seconds to wait between two consecutive calls to
         check query status on athena, defaults to 10
     :type sleep_time: int
     """
@@ -48,21 +51,20 @@ class AthenaSensor(BaseSensorOperator):
     ui_color = '#66c3ff'
 
     @apply_defaults
-    def __init__(self,
-                 query_execution_id,
-                 max_retires=None,
-                 aws_conn_id='aws_default',
-                 sleep_time=10,
-                 *args, **kwargs):
-        super().__init__(*args, **kwargs)
+    def __init__(self, *,
+                 query_execution_id: str,
+                 max_retries: Optional[int] = None,
+                 aws_conn_id: str = 'aws_default',
+                 sleep_time: int = 10,
+                 **kwargs: Any) -> None:
+        super().__init__(**kwargs)
         self.aws_conn_id = aws_conn_id
         self.query_execution_id = query_execution_id
self.sleep_time = sleep_time - self.max_retires = max_retires - self.hook = None + self.max_retries = max_retries - def poke(self, context): - state = self.get_hook().poll_query_status(self.query_execution_id, self.max_retires) + def poke(self, context: dict) -> bool: + state = self.hook.poll_query_status(self.query_execution_id, self.max_retries) if state in self.FAILURE_STATES: raise AirflowException('Athena sensor failed') @@ -71,8 +73,7 @@ def poke(self, context): return False return True - def get_hook(self): + @cached_property + def hook(self) -> AWSAthenaHook: """Create and return an AWSAthenaHook""" - if not self.hook: - self.hook = AWSAthenaHook(self.aws_conn_id, self.sleep_time) - return self.hook + return AWSAthenaHook(self.aws_conn_id, self.sleep_time) diff --git a/airflow/providers/amazon/aws/sensors/cloud_formation.py b/airflow/providers/amazon/aws/sensors/cloud_formation.py index 0e83334231246..05f15a394e7d1 100644 --- a/airflow/providers/amazon/aws/sensors/cloud_formation.py +++ b/airflow/providers/amazon/aws/sensors/cloud_formation.py @@ -40,13 +40,12 @@ class CloudFormationCreateStackSensor(BaseSensorOperator): ui_color = '#C5CAE9' @apply_defaults - def __init__(self, + def __init__(self, *, stack_name, aws_conn_id='aws_default', region_name=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.stack_name = stack_name self.hook = AWSCloudFormationHook(aws_conn_id=aws_conn_id, region_name=region_name) @@ -76,13 +75,12 @@ class CloudFormationDeleteStackSensor(BaseSensorOperator): ui_color = '#C5CAE9' @apply_defaults - def __init__(self, + def __init__(self, *, stack_name, aws_conn_id='aws_default', region_name=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.aws_conn_id = aws_conn_id self.region_name = region_name self.stack_name = stack_name diff --git a/airflow/providers/amazon/aws/sensors/ec2_instance_state.py 
b/airflow/providers/amazon/aws/sensors/ec2_instance_state.py index b96b11b05e6df..c2a53c891e4b8 100644 --- a/airflow/providers/amazon/aws/sensors/ec2_instance_state.py +++ b/airflow/providers/amazon/aws/sensors/ec2_instance_state.py @@ -43,16 +43,15 @@ class EC2InstanceStateSensor(BaseSensorOperator): valid_states = ["running", "stopped", "terminated"] @apply_defaults - def __init__(self, + def __init__(self, *, target_state: str, instance_id: str, aws_conn_id: str = "aws_default", region_name: Optional[str] = None, - *args, **kwargs): if target_state not in self.valid_states: raise ValueError(f"Invalid target_state: {target_state}") - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.target_state = target_state self.instance_id = instance_id self.aws_conn_id = aws_conn_id diff --git a/airflow/providers/amazon/aws/sensors/emr_base.py b/airflow/providers/amazon/aws/sensors/emr_base.py index 2b7bdd9d50245..d487af2146e24 100644 --- a/airflow/providers/amazon/aws/sensors/emr_base.py +++ b/airflow/providers/amazon/aws/sensors/emr_base.py @@ -42,10 +42,10 @@ class EmrBaseSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, aws_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.aws_conn_id = aws_conn_id self.target_states = None # will be set in subclasses self.failed_states = None # will be set in subclasses diff --git a/airflow/providers/amazon/aws/sensors/emr_job_flow.py b/airflow/providers/amazon/aws/sensors/emr_job_flow.py index e36190a3b6a96..004b8b8218df1 100644 --- a/airflow/providers/amazon/aws/sensors/emr_job_flow.py +++ b/airflow/providers/amazon/aws/sensors/emr_job_flow.py @@ -46,13 +46,12 @@ class EmrJobFlowSensor(EmrBaseSensor): template_ext = () @apply_defaults - def __init__(self, + def __init__(self, *, job_flow_id: str, target_states: Optional[Iterable[str]] = None, failed_states: Optional[Iterable[str]] = None, - *args, **kwargs): - 
super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_flow_id = job_flow_id self.target_states = target_states or ['TERMINATED'] self.failed_states = failed_states or ['TERMINATED_WITH_ERRORS'] diff --git a/airflow/providers/amazon/aws/sensors/emr_step.py b/airflow/providers/amazon/aws/sensors/emr_step.py index b4217477cbff0..65394c8927ea4 100644 --- a/airflow/providers/amazon/aws/sensors/emr_step.py +++ b/airflow/providers/amazon/aws/sensors/emr_step.py @@ -46,14 +46,13 @@ class EmrStepSensor(EmrBaseSensor): template_ext = () @apply_defaults - def __init__(self, + def __init__(self, *, job_flow_id: str, step_id: str, target_states: Optional[Iterable[str]] = None, failed_states: Optional[Iterable[str]] = None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_flow_id = job_flow_id self.step_id = step_id self.target_states = target_states or ['COMPLETED'] diff --git a/airflow/providers/amazon/aws/sensors/glue.py b/airflow/providers/amazon/aws/sensors/glue.py index f02d2df1fb2eb..4525602a43c21 100644 --- a/airflow/providers/amazon/aws/sensors/glue.py +++ b/airflow/providers/amazon/aws/sensors/glue.py @@ -35,13 +35,12 @@ class AwsGlueJobSensor(BaseSensorOperator): template_fields = ('job_name', 'run_id') @apply_defaults - def __init__(self, + def __init__(self, *, job_name, run_id, aws_conn_id='aws_default', - *args, **kwargs): - super(AwsGlueJobSensor, self).__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_name = job_name self.run_id = run_id self.aws_conn_id = aws_conn_id diff --git a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py index 45c057b5a5557..5d900abf436ba 100644 --- a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +++ b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py @@ -51,16 +51,15 @@ class AwsGlueCatalogPartitionSensor(BaseSensorOperator): ui_color = '#C5CAE9' 
@apply_defaults - def __init__(self, + def __init__(self, *, table_name, expression="ds='{{ ds }}'", aws_conn_id='aws_default', region_name=None, database_name='default', poke_interval=60 * 3, - *args, **kwargs): super().__init__( - poke_interval=poke_interval, *args, **kwargs) + poke_interval=poke_interval, **kwargs) self.aws_conn_id = aws_conn_id self.region_name = region_name self.table_name = table_name diff --git a/airflow/providers/amazon/aws/sensors/redshift.py b/airflow/providers/amazon/aws/sensors/redshift.py index 211af30657254..0c893cadb3d63 100644 --- a/airflow/providers/amazon/aws/sensors/redshift.py +++ b/airflow/providers/amazon/aws/sensors/redshift.py @@ -33,13 +33,12 @@ class AwsRedshiftClusterSensor(BaseSensorOperator): template_fields = ('cluster_identifier', 'target_status') @apply_defaults - def __init__(self, + def __init__(self, *, cluster_identifier, target_status='available', aws_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.cluster_identifier = cluster_identifier self.target_status = target_status self.aws_conn_id = aws_conn_id diff --git a/airflow/providers/amazon/aws/sensors/s3_key.py b/airflow/providers/amazon/aws/sensors/s3_key.py index adb027890fd24..2661daa0dc85d 100644 --- a/airflow/providers/amazon/aws/sensors/s3_key.py +++ b/airflow/providers/amazon/aws/sensors/s3_key.py @@ -58,15 +58,14 @@ class S3KeySensor(BaseSensorOperator): template_fields = ('bucket_key', 'bucket_name') @apply_defaults - def __init__(self, + def __init__(self, *, bucket_key, bucket_name=None, wildcard_match=False, aws_conn_id='aws_default', verify=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) # Parse if bucket_name is None: parsed_url = urlparse(bucket_key) diff --git a/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py b/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py new file mode 100644 index 0000000000000..95a2148a755fa --- 
/dev/null +++ b/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py @@ -0,0 +1,163 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import os +from datetime import datetime +from typing import Optional, Set, Union + +from cached_property import cached_property + +from airflow.exceptions import AirflowException +from airflow.providers.amazon.aws.hooks.s3 import S3Hook +from airflow.sensors.base_sensor_operator import BaseSensorOperator, poke_mode_only +from airflow.utils.decorators import apply_defaults + + +@poke_mode_only +class S3KeysUnchangedSensor(BaseSensorOperator): + """ + Checks for changes in the number of objects at prefix in AWS S3 + bucket and returns True if the inactivity period has passed with no + increase in the number of objects. Note, this sensor will not behave correctly + in reschedule mode, as the state of the listed objects in the S3 bucket will + be lost between rescheduled invocations. + + :param bucket_name: Name of the S3 bucket + :type bucket_name: str + :param prefix: The prefix being waited on. Relative path from bucket root level. 
+ :type prefix: str +    :param aws_conn_id: a reference to the s3 connection +    :type aws_conn_id: str +    :param verify: Whether or not to verify SSL certificates for S3 connection. +        By default SSL certificates are verified. +        You can provide the following values: + +        - ``False``: do not validate SSL certificates. SSL will still be used +             (unless use_ssl is False), but SSL certificates will not be +             verified. +        - ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to use. +             You can specify this argument if you want to use a different +             CA cert bundle than the one used by botocore. +    :type verify: Optional[Union[bool, str]] +    :param inactivity_period: The total seconds of inactivity to designate +        keys unchanged. Note, this mechanism is not real time and +        this operator may not return until a poke_interval after this period +        has passed with no additional objects sensed. +    :type inactivity_period: float +    :param min_objects: The minimum number of objects needed for keys unchanged +        sensor to be considered valid. +    :type min_objects: int +    :param previous_objects: The set of object ids found during the last poke. +    :type previous_objects: Optional[Set[str]] +    :param allow_delete: Should this sensor consider objects being deleted +        between pokes valid behavior. If true a warning message will be logged +        when this happens. If false an error will be raised. 
+ :type allow_delete: bool +    """ + +    template_fields = ('bucket_name', 'prefix') + +    @apply_defaults +    def __init__(self, *, +                 bucket_name: str, +                 prefix: str, +                 aws_conn_id: str = 'aws_default', +                 verify: Optional[Union[bool, str]] = None, +                 inactivity_period: float = 60 * 60, +                 min_objects: int = 1, +                 previous_objects: Optional[Set[str]] = None, +                 allow_delete: bool = True, +                 **kwargs) -> None: + +        super().__init__(**kwargs) + +        self.bucket = bucket_name +        self.prefix = prefix +        if inactivity_period < 0: +            raise ValueError("inactivity_period must be non-negative") +        self.inactivity_period = inactivity_period +        self.min_objects = min_objects +        self.previous_objects = previous_objects or set() +        self.inactivity_seconds = 0 +        self.allow_delete = allow_delete +        self.aws_conn_id = aws_conn_id +        self.verify = verify +        self.last_activity_time: Optional[datetime] = None + +    @cached_property +    def hook(self): +        """ +        Returns S3Hook. +        """ +        return S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify) + +    def is_keys_unchanged(self, current_objects: Set[str]) -> bool: +        """ +        Checks whether new objects have been uploaded and the inactivity_period +        has passed and updates the state of the sensor accordingly. + +        :param current_objects: set of object ids in bucket during current poke. +        :type current_objects: set[str] +        """ +        current_num_objects = len(current_objects) +        if current_objects > self.previous_objects: +            # When new objects arrived, reset the inactivity_seconds +            # and update previous_objects for the next poke. +            self.log.info("New objects found at %s, resetting last_activity_time.", +                          os.path.join(self.bucket, self.prefix)) +            self.log.debug("New objects: %s", current_objects - self.previous_objects) +            self.last_activity_time = datetime.now() +            self.inactivity_seconds = 0 +            self.previous_objects = current_objects +            return False + +        if self.previous_objects - current_objects: +            # During the last poke interval objects were deleted. 
+ if self.allow_delete: + deleted_objects = self.previous_objects - current_objects + self.previous_objects = current_objects + self.last_activity_time = datetime.now() + self.log.info("Objects were deleted during the last poke interval. Updating the " + "file counter and resetting last_activity_time:\n%s", deleted_objects) + return False + + raise AirflowException("Illegal behavior: objects were deleted in %s between pokes." + % os.path.join(self.bucket, self.prefix)) + + if self.last_activity_time: + self.inactivity_seconds = int((datetime.now() - self.last_activity_time).total_seconds()) + else: + # Handles the first poke where last inactivity time is None. + self.last_activity_time = datetime.now() + self.inactivity_seconds = 0 + + if self.inactivity_seconds >= self.inactivity_period: + path = os.path.join(self.bucket, self.prefix) + + if current_num_objects >= self.min_objects: + self.log.info("SUCCESS: \nSensor found %s objects at %s.\n" + "Waited at least %s seconds, with no new objects uploaded.", + current_num_objects, path, self.inactivity_period) + return True + + self.log.error("FAILURE: Inactivity Period passed, not enough objects found in %s", path) + + return False + return False + + def poke(self, context): + return self.is_keys_unchanged(set(self.hook.list_keys(self.bucket, prefix=self.prefix))) diff --git a/airflow/providers/amazon/aws/sensors/s3_prefix.py b/airflow/providers/amazon/aws/sensors/s3_prefix.py index 9647fdb0c5d23..acaf961830c59 100644 --- a/airflow/providers/amazon/aws/sensors/s3_prefix.py +++ b/airflow/providers/amazon/aws/sensors/s3_prefix.py @@ -54,15 +54,14 @@ class S3PrefixSensor(BaseSensorOperator): template_fields = ('prefix', 'bucket_name') @apply_defaults - def __init__(self, + def __init__(self, *, bucket_name, prefix, delimiter='/', aws_conn_id='aws_default', verify=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) # Parse self.bucket_name = bucket_name self.prefix = prefix diff 
--git a/airflow/providers/amazon/aws/sensors/sagemaker_base.py b/airflow/providers/amazon/aws/sensors/sagemaker_base.py index 39ccca8bfaa50..b3468df9a1b61 100644 --- a/airflow/providers/amazon/aws/sensors/sagemaker_base.py +++ b/airflow/providers/amazon/aws/sensors/sagemaker_base.py @@ -32,10 +32,10 @@ class SageMakerBaseSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, aws_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.aws_conn_id = aws_conn_id self.hook = None diff --git a/airflow/providers/amazon/aws/sensors/sagemaker_endpoint.py b/airflow/providers/amazon/aws/sensors/sagemaker_endpoint.py index a59636820d059..b8df5bf644e68 100644 --- a/airflow/providers/amazon/aws/sensors/sagemaker_endpoint.py +++ b/airflow/providers/amazon/aws/sensors/sagemaker_endpoint.py @@ -34,11 +34,10 @@ class SageMakerEndpointSensor(SageMakerBaseSensor): template_ext = () @apply_defaults - def __init__(self, + def __init__(self, *, endpoint_name, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.endpoint_name = endpoint_name def non_terminal_states(self): diff --git a/airflow/providers/amazon/aws/sensors/sagemaker_training.py b/airflow/providers/amazon/aws/sensors/sagemaker_training.py index deacb2983db72..1695d95a59bea 100644 --- a/airflow/providers/amazon/aws/sensors/sagemaker_training.py +++ b/airflow/providers/amazon/aws/sensors/sagemaker_training.py @@ -38,12 +38,11 @@ class SageMakerTrainingSensor(SageMakerBaseSensor): template_ext = () @apply_defaults - def __init__(self, + def __init__(self, *, job_name, print_log=True, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_name = job_name self.print_log = print_log self.positions = {} diff --git a/airflow/providers/amazon/aws/sensors/sagemaker_transform.py b/airflow/providers/amazon/aws/sensors/sagemaker_transform.py index 
1984aa082fe5f..5a9ffdc9b0350 100644 --- a/airflow/providers/amazon/aws/sensors/sagemaker_transform.py +++ b/airflow/providers/amazon/aws/sensors/sagemaker_transform.py @@ -35,11 +35,10 @@ class SageMakerTransformSensor(SageMakerBaseSensor): template_ext = () @apply_defaults - def __init__(self, + def __init__(self, *, job_name, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_name = job_name def non_terminal_states(self): diff --git a/airflow/providers/amazon/aws/sensors/sagemaker_tuning.py b/airflow/providers/amazon/aws/sensors/sagemaker_tuning.py index 71c05923bb1d3..6b97807c7ce47 100644 --- a/airflow/providers/amazon/aws/sensors/sagemaker_tuning.py +++ b/airflow/providers/amazon/aws/sensors/sagemaker_tuning.py @@ -35,11 +35,10 @@ class SageMakerTuningSensor(SageMakerBaseSensor): template_ext = () @apply_defaults - def __init__(self, + def __init__(self, *, job_name, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_name = job_name def non_terminal_states(self): diff --git a/airflow/providers/amazon/aws/sensors/sqs.py b/airflow/providers/amazon/aws/sensors/sqs.py index d959013880c62..573981bdc2a0d 100644 --- a/airflow/providers/amazon/aws/sensors/sqs.py +++ b/airflow/providers/amazon/aws/sensors/sqs.py @@ -44,14 +44,13 @@ class SQSSensor(BaseSensorOperator): template_fields = ('sqs_queue', 'max_messages') @apply_defaults - def __init__(self, + def __init__(self, *, sqs_queue, aws_conn_id='aws_default', max_messages=5, wait_time_seconds=1, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.sqs_queue = sqs_queue self.aws_conn_id = aws_conn_id self.max_messages = max_messages diff --git a/airflow/providers/amazon/aws/sensors/step_function_execution.py b/airflow/providers/amazon/aws/sensors/step_function_execution.py index 0cc3caf271806..a0e640e89b17b 100644 --- a/airflow/providers/amazon/aws/sensors/step_function_execution.py +++ 
b/airflow/providers/amazon/aws/sensors/step_function_execution.py @@ -47,9 +47,9 @@ class StepFunctionExecutionSensor(BaseSensorOperator): ui_color = '#66c3ff' @apply_defaults - def __init__(self, execution_arn: str, aws_conn_id='aws_default', region_name=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, execution_arn: str, aws_conn_id='aws_default', region_name=None, + **kwargs): + super().__init__(**kwargs) self.execution_arn = execution_arn self.aws_conn_id = aws_conn_id self.region_name = region_name diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py index 799d75de61fe7..7f71a5445115d 100644 --- a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py @@ -31,6 +31,7 @@ from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.aws_dynamodb import AwsDynamoDBHook from airflow.providers.amazon.aws.hooks.s3 import S3Hook +from airflow.utils.decorators import apply_defaults def _convert_item_to_json_bytes(item): @@ -90,15 +91,16 @@ class DynamoDBToS3Operator(BaseOperator): :param process_func: How we transforms a dynamodb item to bytes. 
By default we dump the json """ - def __init__(self, + @apply_defaults + def __init__(self, *, dynamodb_table_name: str, s3_bucket_name: str, file_size: int, dynamodb_scan_kwargs: Optional[Dict[str, Any]] = None, s3_key_prefix: str = '', process_func: Callable[[Dict[str, Any]], bytes] = _convert_item_to_json_bytes, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.file_size = file_size self.process_func = process_func self.dynamodb_table_name = dynamodb_table_name diff --git a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py index 28b2c5e595038..f7897a191ee4b 100644 --- a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py @@ -78,7 +78,7 @@ class GCSToS3Operator(GCSListObjectsOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments + def __init__(self, *, # pylint: disable=too-many-arguments bucket, prefix=None, delimiter=None, @@ -89,7 +89,6 @@ def __init__(self, # pylint: disable=too-many-arguments dest_s3_key=None, dest_verify=None, replace=False, - *args, **kwargs): if google_cloud_storage_conn_id: @@ -104,7 +103,6 @@ def __init__(self, # pylint: disable=too-many-arguments delimiter=delimiter, gcp_conn_id=gcp_conn_id, delegate_to=delegate_to, - *args, **kwargs ) diff --git a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py index e30e39b5d1a0a..741b8a0800b7b 100644 --- a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py @@ -85,13 +85,12 @@ class GoogleApiToS3Operator(BaseOperator): @apply_defaults def __init__( - self, + self, *, google_api_service_name, google_api_service_version, google_api_endpoint_path, google_api_endpoint_params, s3_destination_key, - *args, google_api_response_via_xcom=None, 
google_api_endpoint_params_via_xcom=None, google_api_endpoint_params_via_xcom_task_ids=None, @@ -103,7 +102,7 @@ def __init__( aws_conn_id='aws_default', **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.google_api_service_name = google_api_service_name self.google_api_service_version = google_api_service_version self.google_api_endpoint_path = google_api_endpoint_path diff --git a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py index 882a6fb2ebd38..3eecacaaa3c77 100644 --- a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +++ b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py @@ -62,7 +62,7 @@ class HiveToDynamoDBOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, sql, table_name, table_keys, @@ -73,8 +73,8 @@ def __init__( # pylint: disable=too-many-arguments schema='default', hiveserver2_conn_id='hiveserver2_default', aws_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.sql = sql self.table_name = table_name self.table_keys = table_keys diff --git a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py index a894ae4d2cfa9..79505f123cc80 100644 --- a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py @@ -53,7 +53,7 @@ class ImapAttachmentToS3Operator(BaseOperator): template_fields = ('imap_attachment_name', 's3_key', 'imap_mail_filter') @apply_defaults - def __init__(self, + def __init__(self, *, imap_attachment_name, s3_key, imap_check_regex=False, @@ -62,9 +62,8 @@ def __init__(self, s3_overwrite=False, imap_conn_id='imap_default', s3_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) 
self.imap_attachment_name = imap_attachment_name self.s3_key = s3_key self.imap_check_regex = imap_check_regex diff --git a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py index c50ab2740967d..214689c5efac7 100644 --- a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py @@ -41,7 +41,7 @@ class MongoToS3Operator(BaseOperator): # pylint: disable=too-many-instance-attributes @apply_defaults - def __init__(self, + def __init__(self, *, mongo_conn_id, s3_conn_id, mongo_collection, @@ -50,8 +50,8 @@ def __init__(self, s3_key, mongo_db=None, replace=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) # Conn Ids self.mongo_conn_id = mongo_conn_id self.s3_conn_id = s3_conn_id diff --git a/airflow/providers/amazon/aws/transfers/mysql_to_s3.py b/airflow/providers/amazon/aws/transfers/mysql_to_s3.py index 64d199227a8ce..249e4b24b208c 100644 --- a/airflow/providers/amazon/aws/transfers/mysql_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/mysql_to_s3.py @@ -68,7 +68,7 @@ class MySQLToS3Operator(BaseOperator): @apply_defaults def __init__( - self, + self, *, query: str, s3_bucket: str, s3_key: str, @@ -78,8 +78,8 @@ def __init__( pd_csv_kwargs: Optional[dict] = None, index: Optional[bool] = False, header: Optional[bool] = False, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.query = query self.s3_bucket = s3_bucket self.s3_key = s3_key diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py index 5c85506be55c0..9f1b113caf94f 100644 --- a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py @@ -71,7 +71,7 @@ class RedshiftToS3Operator(BaseOperator): @apply_defaults def __init__( # pylint: 
disable=too-many-arguments - self, + self, *, schema: str, table: str, s3_bucket: str, @@ -83,8 +83,8 @@ def __init__( # pylint: disable=too-many-arguments autocommit: bool = False, include_header: bool = False, table_as_file_name: bool = True, # Set to True by default for not breaking current workflows - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.schema = schema self.table = table self.s3_bucket = s3_bucket diff --git a/airflow/providers/amazon/aws/transfers/s3_to_redshift.py b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py index f2228e90a0f40..fd14da29916aa 100644 --- a/airflow/providers/amazon/aws/transfers/s3_to_redshift.py +++ b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py @@ -64,7 +64,7 @@ class S3ToRedshiftOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, schema: str, table: str, s3_bucket: str, @@ -74,8 +74,8 @@ def __init__( verify: Optional[Union[bool, str]] = None, copy_options: Optional[List] = None, autocommit: bool = False, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.schema = schema self.table = table self.s3_bucket = s3_bucket diff --git a/airflow/providers/amazon/aws/transfers/s3_to_sftp.py b/airflow/providers/amazon/aws/transfers/s3_to_sftp.py index e2de4ad83de29..fd9246d416b3b 100644 --- a/airflow/providers/amazon/aws/transfers/s3_to_sftp.py +++ b/airflow/providers/amazon/aws/transfers/s3_to_sftp.py @@ -49,15 +49,14 @@ class S3ToSFTPOperator(BaseOperator): template_fields = ('s3_key', 'sftp_path') @apply_defaults - def __init__(self, + def __init__(self, *, s3_bucket, s3_key, sftp_path, sftp_conn_id='ssh_default', s3_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.sftp_conn_id = sftp_conn_id self.sftp_path = sftp_path self.s3_bucket = s3_bucket diff --git 
a/airflow/providers/amazon/aws/transfers/sftp_to_s3.py b/airflow/providers/amazon/aws/transfers/sftp_to_s3.py index b5f569d11743a..c1b6e65e5b82d 100644 --- a/airflow/providers/amazon/aws/transfers/sftp_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/sftp_to_s3.py @@ -49,15 +49,14 @@ class SFTPToS3Operator(BaseOperator): template_fields = ('s3_key', 'sftp_path') @apply_defaults - def __init__(self, + def __init__(self, *, s3_bucket, s3_key, sftp_path, sftp_conn_id='ssh_default', s3_conn_id='aws_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.sftp_conn_id = sftp_conn_id self.sftp_path = sftp_path self.s3_bucket = s3_bucket diff --git a/airflow/providers/apache/cassandra/example_dags/example_cassandra_dag.py b/airflow/providers/apache/cassandra/example_dags/example_cassandra_dag.py index ec9b491e6f473..ce736344b9c25 100644 --- a/airflow/providers/apache/cassandra/example_dags/example_cassandra_dag.py +++ b/airflow/providers/apache/cassandra/example_dags/example_cassandra_dag.py @@ -27,13 +27,13 @@ args = { 'owner': 'Airflow', - 'start_date': days_ago(2) } with DAG( dag_id='example_cassandra_operator', default_args=args, schedule_interval=None, + start_date=days_ago(2), tags=['example'] ) as dag: # [START howto_operator_cassandra_table_sensor] diff --git a/airflow/providers/apache/cassandra/sensors/record.py b/airflow/providers/apache/cassandra/sensors/record.py index 89fafc6443338..ea67ac5841c57 100644 --- a/airflow/providers/apache/cassandra/sensors/record.py +++ b/airflow/providers/apache/cassandra/sensors/record.py @@ -56,7 +56,7 @@ class CassandraRecordSensor(BaseSensorOperator): template_fields = ('table', 'keys') @apply_defaults - def __init__(self, table: str, keys: Dict[str, str], cassandra_conn_id: str, **kwargs: Any) -> None: + def __init__(self, *, table: str, keys: Dict[str, str], cassandra_conn_id: str, **kwargs: Any) -> None: super().__init__(**kwargs) self.cassandra_conn_id = cassandra_conn_id 
self.table = table diff --git a/airflow/providers/apache/cassandra/sensors/table.py b/airflow/providers/apache/cassandra/sensors/table.py index 89385773289ca..82cd411c0ada4 100644 --- a/airflow/providers/apache/cassandra/sensors/table.py +++ b/airflow/providers/apache/cassandra/sensors/table.py @@ -54,7 +54,7 @@ class CassandraTableSensor(BaseSensorOperator): template_fields = ('table',) @apply_defaults - def __init__(self, table: str, cassandra_conn_id: str, **kwargs: Any) -> None: + def __init__(self, *, table: str, cassandra_conn_id: str, **kwargs: Any) -> None: super().__init__(**kwargs) self.cassandra_conn_id = cassandra_conn_id self.table = table diff --git a/airflow/providers/apache/druid/operators/druid.py b/airflow/providers/apache/druid/operators/druid.py index bf8ee6ae6b091..f046ff1552a93 100644 --- a/airflow/providers/apache/druid/operators/druid.py +++ b/airflow/providers/apache/druid/operators/druid.py @@ -38,7 +38,7 @@ class DruidOperator(BaseOperator): template_ext = ('.json',) @apply_defaults - def __init__(self, json_index_file: str, + def __init__(self, *, json_index_file: str, druid_ingest_conn_id: str = 'druid_ingest_default', max_ingestion_time: Optional[int] = None, **kwargs: Any) -> None: diff --git a/airflow/providers/apache/druid/operators/druid_check.py b/airflow/providers/apache/druid/operators/druid_check.py index 380910cb35930..2f6114dfec0f7 100644 --- a/airflow/providers/apache/druid/operators/druid_check.py +++ b/airflow/providers/apache/druid/operators/druid_check.py @@ -59,6 +59,7 @@ class DruidCheckOperator(CheckOperator): @apply_defaults def __init__( self, + *, sql: str, druid_broker_conn_id: str = 'druid_broker_default', **kwargs: Any diff --git a/airflow/providers/apache/druid/transfers/hive_to_druid.py b/airflow/providers/apache/druid/transfers/hive_to_druid.py index 4f712bbadcb2b..595db0f5106bd 100644 --- a/airflow/providers/apache/druid/transfers/hive_to_druid.py +++ 
b/airflow/providers/apache/druid/transfers/hive_to_druid.py @@ -84,7 +84,7 @@ class HiveToDruidOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, sql: str, druid_datasource: str, ts_dim: str, diff --git a/airflow/providers/apache/hdfs/sensors/hdfs.py b/airflow/providers/apache/hdfs/sensors/hdfs.py index 2307c111ea695..757f350787104 100644 --- a/airflow/providers/apache/hdfs/sensors/hdfs.py +++ b/airflow/providers/apache/hdfs/sensors/hdfs.py @@ -37,6 +37,7 @@ class HdfsSensor(BaseSensorOperator): @apply_defaults def __init__(self, + *, filepath: str, hdfs_conn_id: str = 'hdfs_default', ignored_ext: Optional[List[str]] = None, diff --git a/airflow/providers/apache/hdfs/sensors/web_hdfs.py b/airflow/providers/apache/hdfs/sensors/web_hdfs.py index f380d8deb043b..8d21b3e84dfd1 100644 --- a/airflow/providers/apache/hdfs/sensors/web_hdfs.py +++ b/airflow/providers/apache/hdfs/sensors/web_hdfs.py @@ -29,6 +29,7 @@ class WebHdfsSensor(BaseSensorOperator): @apply_defaults def __init__(self, + *, filepath: str, webhdfs_conn_id: str = 'webhdfs_default', **kwargs: Any) -> None: diff --git a/airflow/providers/apache/hive/example_dags/example_twitter_dag.py b/airflow/providers/apache/hive/example_dags/example_twitter_dag.py index b6dace4b54aa7..7dc03df7854cf 100644 --- a/airflow/providers/apache/hive/example_dags/example_twitter_dag.py +++ b/airflow/providers/apache/hive/example_dags/example_twitter_dag.py @@ -73,7 +73,6 @@ def transfertodb(): default_args = { 'owner': 'Ekhtiar', 'depends_on_past': False, - 'start_date': days_ago(5), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False, @@ -89,6 +88,7 @@ def transfertodb(): dag_id='example_twitter_dag', default_args=default_args, schedule_interval="@daily", + start_date=days_ago(5), tags=['example'], ) as dag: diff --git a/airflow/providers/apache/hive/operators/hive.py b/airflow/providers/apache/hive/operators/hive.py index 
7ab55fe16a799..48d75742de5ed 100644 --- a/airflow/providers/apache/hive/operators/hive.py +++ b/airflow/providers/apache/hive/operators/hive.py @@ -70,7 +70,7 @@ class HiveOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, + self, *, hql: str, hive_cli_conn_id: str = 'hive_cli_default', schema: str = 'default', diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/airflow/providers/apache/hive/operators/hive_stats.py index b9b47ac37774d..6fc689e82fc52 100644 --- a/airflow/providers/apache/hive/operators/hive_stats.py +++ b/airflow/providers/apache/hive/operators/hive_stats.py @@ -63,7 +63,7 @@ class HiveStatsCollectionOperator(BaseOperator): ui_color = '#aff7a6' @apply_defaults - def __init__(self, + def __init__(self, *, table: str, partition: Any, extra_exprs: Optional[Dict[str, Any]] = None, diff --git a/airflow/providers/apache/hive/sensors/hive_partition.py b/airflow/providers/apache/hive/sensors/hive_partition.py index 19f7918f5ef53..8e1b8279da048 100644 --- a/airflow/providers/apache/hive/sensors/hive_partition.py +++ b/airflow/providers/apache/hive/sensors/hive_partition.py @@ -46,7 +46,7 @@ class HivePartitionSensor(BaseSensorOperator): ui_color = '#C5CAE9' @apply_defaults - def __init__(self, + def __init__(self, *, table: str, partition: Optional[str] = "ds='{{ ds }}'", metastore_conn_id: str = 'metastore_default', diff --git a/airflow/providers/apache/hive/sensors/metastore_partition.py b/airflow/providers/apache/hive/sensors/metastore_partition.py index 5dbdcd398ae17..1e54440f3ada7 100644 --- a/airflow/providers/apache/hive/sensors/metastore_partition.py +++ b/airflow/providers/apache/hive/sensors/metastore_partition.py @@ -45,7 +45,7 @@ class MetastorePartitionSensor(SqlSensor): ui_color = '#8da7be' @apply_defaults - def __init__(self, + def __init__(self, *, table: str, partition_name: str, schema: str = "default", diff --git a/airflow/providers/apache/hive/sensors/named_hive_partition.py 
b/airflow/providers/apache/hive/sensors/named_hive_partition.py index 43cc1130fcb3d..f69e2b2d4ea51 100644 --- a/airflow/providers/apache/hive/sensors/named_hive_partition.py +++ b/airflow/providers/apache/hive/sensors/named_hive_partition.py @@ -42,7 +42,7 @@ class NamedHivePartitionSensor(BaseSensorOperator): ui_color = '#8d99ae' @apply_defaults - def __init__(self, + def __init__(self, *, partition_names: List[str], metastore_conn_id: str = 'metastore_default', poke_interval: int = 60 * 3, diff --git a/airflow/providers/apache/hive/transfers/hive_to_mysql.py b/airflow/providers/apache/hive/transfers/hive_to_mysql.py index c937ceda01356..724c7919f8e80 100644 --- a/airflow/providers/apache/hive/transfers/hive_to_mysql.py +++ b/airflow/providers/apache/hive/transfers/hive_to_mysql.py @@ -67,7 +67,7 @@ class HiveToMySqlOperator(BaseOperator): ui_color = '#a0e08c' @apply_defaults - def __init__(self, + def __init__(self, *, sql: str, mysql_table: str, hiveserver2_conn_id: str = 'hiveserver2_default', diff --git a/airflow/providers/apache/hive/transfers/hive_to_samba.py b/airflow/providers/apache/hive/transfers/hive_to_samba.py index 742df32981823..5f08b83d09487 100644 --- a/airflow/providers/apache/hive/transfers/hive_to_samba.py +++ b/airflow/providers/apache/hive/transfers/hive_to_samba.py @@ -48,7 +48,7 @@ class HiveToSambaOperator(BaseOperator): template_ext = ('.hql', '.sql',) @apply_defaults - def __init__(self, + def __init__(self, *, hql: str, destination_filepath: str, samba_conn_id: str = 'samba_default', diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py index 77e63ebd419d3..01a932724f9cb 100644 --- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py @@ -76,7 +76,7 @@ class MsSqlToHiveOperator(BaseOperator): ui_color = '#a0e08c' @apply_defaults - def __init__(self, + def __init__(self, *, sql: str, hive_table: 
str, create: bool = True, diff --git a/airflow/providers/apache/hive/transfers/mysql_to_hive.py b/airflow/providers/apache/hive/transfers/mysql_to_hive.py index b3d070f27e474..99650ec59f024 100644 --- a/airflow/providers/apache/hive/transfers/mysql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mysql_to_hive.py @@ -86,6 +86,7 @@ class MySqlToHiveOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments self, + *, sql: str, hive_table: str, create: bool = True, diff --git a/airflow/providers/apache/hive/transfers/s3_to_hive.py b/airflow/providers/apache/hive/transfers/s3_to_hive.py index 734d3335f03bf..6c730a0a782bd 100644 --- a/airflow/providers/apache/hive/transfers/s3_to_hive.py +++ b/airflow/providers/apache/hive/transfers/s3_to_hive.py @@ -108,6 +108,7 @@ class S3ToHiveOperator(BaseOperator): # pylint: disable=too-many-instance-attri @apply_defaults def __init__( # pylint: disable=too-many-arguments self, + *, s3_key: str, field_dict: Dict, hive_table: str, diff --git a/airflow/providers/apache/hive/transfers/vertica_to_hive.py b/airflow/providers/apache/hive/transfers/vertica_to_hive.py index 1b9a4d63328e9..02a4f80ab2c66 100644 --- a/airflow/providers/apache/hive/transfers/vertica_to_hive.py +++ b/airflow/providers/apache/hive/transfers/vertica_to_hive.py @@ -74,6 +74,7 @@ class VerticaToHiveOperator(BaseOperator): @apply_defaults def __init__( self, + *, sql, hive_table, create=True, diff --git a/airflow/providers/apache/kylin/example_dags/example_kylin_dag.py b/airflow/providers/apache/kylin/example_dags/example_kylin_dag.py index eb5aa9208e7f4..5b9ec1572ea06 100644 --- a/airflow/providers/apache/kylin/example_dags/example_kylin_dag.py +++ b/airflow/providers/apache/kylin/example_dags/example_kylin_dag.py @@ -27,13 +27,13 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(1), } dag = DAG( dag_id='example_kylin_operator', default_args=args, schedule_interval=None, + start_date=days_ago(1), tags=['example'] ) 
diff --git a/airflow/providers/apache/kylin/operators/kylin_cube.py b/airflow/providers/apache/kylin/operators/kylin_cube.py index 7b8e907c78937..cf53b63ce4211 100644 --- a/airflow/providers/apache/kylin/operators/kylin_cube.py +++ b/airflow/providers/apache/kylin/operators/kylin_cube.py @@ -96,7 +96,7 @@ class KylinCubeOperator(BaseOperator): # pylint: disable=too-many-arguments,inconsistent-return-statements @apply_defaults - def __init__(self, + def __init__(self, *, kylin_conn_id: Optional[str] = 'kylin_default', project: Optional[str] = None, cube: Optional[str] = None, diff --git a/airflow/providers/apache/livy/example_dags/example_livy.py b/airflow/providers/apache/livy/example_dags/example_livy.py index 91fb61e2ca206..9e561c9b26606 100644 --- a/airflow/providers/apache/livy/example_dags/example_livy.py +++ b/airflow/providers/apache/livy/example_dags/example_livy.py @@ -28,14 +28,14 @@ args = { 'owner': 'airflow', 'email': ['airflow@example.com'], - 'depends_on_past': False, - 'start_date': days_ago(5) + 'depends_on_past': False } with DAG( dag_id='example_livy_operator', default_args=args, - schedule_interval='@daily' + schedule_interval='@daily', + start_date=days_ago(5), ) as dag: livy_java_task = LivyOperator( diff --git a/airflow/providers/apache/livy/operators/livy.py b/airflow/providers/apache/livy/operators/livy.py index 44cc29d8ee4a8..16be339f3f007 100644 --- a/airflow/providers/apache/livy/operators/livy.py +++ b/airflow/providers/apache/livy/operators/livy.py @@ -74,7 +74,7 @@ class LivyOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, file: str, class_name: Optional[str] = None, args: Optional[Sequence[Union[str, int, float]]] = None, diff --git a/airflow/providers/apache/livy/sensors/livy.py b/airflow/providers/apache/livy/sensors/livy.py index bb54a9aa6e43e..b9d0bc429de0b 100644 --- a/airflow/providers/apache/livy/sensors/livy.py +++ b/airflow/providers/apache/livy/sensors/livy.py @@ -39,7 +39,7 @@ class 
LivySensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, batch_id: Union[int, str], livy_conn_id: str = 'livy_default', **kwargs: Any diff --git a/airflow/providers/apache/pig/example_dags/example_pig.py b/airflow/providers/apache/pig/example_dags/example_pig.py index f54a64d6a31c2..8917f86cfe01f 100644 --- a/airflow/providers/apache/pig/example_dags/example_pig.py +++ b/airflow/providers/apache/pig/example_dags/example_pig.py @@ -24,13 +24,13 @@ args = { 'owner': 'airflow', - 'start_date': days_ago(2), } dag = DAG( dag_id='example_pig_operator', default_args=args, schedule_interval=None, + start_date=days_ago(2), tags=['example'] ) diff --git a/airflow/providers/apache/pig/operators/pig.py b/airflow/providers/apache/pig/operators/pig.py index f80754e4ef305..3f3c57897a18b 100644 --- a/airflow/providers/apache/pig/operators/pig.py +++ b/airflow/providers/apache/pig/operators/pig.py @@ -47,7 +47,7 @@ class PigOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, pig: str, pig_cli_conn_id: str = 'pig_cli_default', pigparams_jinja_translate: bool = False, diff --git a/airflow/providers/apache/spark/example_dags/example_spark_dag.py b/airflow/providers/apache/spark/example_dags/example_spark_dag.py index c25bd4cbfaf55..5d279e04e2652 100644 --- a/airflow/providers/apache/spark/example_dags/example_spark_dag.py +++ b/airflow/providers/apache/spark/example_dags/example_spark_dag.py @@ -28,13 +28,13 @@ args = { 'owner': 'Airflow', - 'start_date': days_ago(2) } with DAG( dag_id='example_spark_operator', default_args=args, schedule_interval=None, + start_date=days_ago(2), tags=['example'] ) as dag: # [START howto_operator_spark_submit] diff --git a/airflow/providers/apache/spark/operators/spark_jdbc.py b/airflow/providers/apache/spark/operators/spark_jdbc.py index c7f8fde11aa64..8c1c7be494559 100644 --- a/airflow/providers/apache/spark/operators/spark_jdbc.py +++ b/airflow/providers/apache/spark/operators/spark_jdbc.py @@ -120,7 
+120,7 @@ class SparkJDBCOperator(SparkSubmitOperator): # pylint: disable=too-many-arguments,too-many-locals @apply_defaults - def __init__(self, + def __init__(self, *, spark_app_name: str = 'airflow-spark-jdbc', spark_conn_id: str = 'spark-default', spark_conf: Optional[Dict[str, Any]] = None, diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/airflow/providers/apache/spark/operators/spark_sql.py index 796228a632c6e..59a1001a582f1 100644 --- a/airflow/providers/apache/spark/operators/spark_sql.py +++ b/airflow/providers/apache/spark/operators/spark_sql.py @@ -64,7 +64,7 @@ class SparkSqlOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, sql: str, conf: Optional[str] = None, conn_id: str = 'spark_sql_default', diff --git a/airflow/providers/apache/spark/operators/spark_submit.py b/airflow/providers/apache/spark/operators/spark_submit.py index 87208d9701db6..24d684ae140ca 100644 --- a/airflow/providers/apache/spark/operators/spark_submit.py +++ b/airflow/providers/apache/spark/operators/spark_submit.py @@ -102,7 +102,7 @@ class SparkSubmitOperator(BaseOperator): # pylint: disable=too-many-arguments,too-many-locals @apply_defaults - def __init__(self, + def __init__(self, *, application: str = '', conf: Optional[Dict[str, Any]] = None, conn_id: str = 'spark_default', diff --git a/airflow/providers/apache/sqoop/operators/sqoop.py b/airflow/providers/apache/sqoop/operators/sqoop.py index 1ae9841428701..3400360fc5d40 100644 --- a/airflow/providers/apache/sqoop/operators/sqoop.py +++ b/airflow/providers/apache/sqoop/operators/sqoop.py @@ -95,7 +95,7 @@ class SqoopOperator(BaseOperator): # pylint: disable=too-many-arguments,too-many-locals @apply_defaults - def __init__(self, + def __init__(self, *, conn_id: str = 'sqoop_default', cmd_type: str = 'import', table: Optional[str] = None, diff --git a/airflow/providers/celery/sensors/celery_queue.py 
b/airflow/providers/celery/sensors/celery_queue.py index f9b09e57d33b6..ff0b466c145f9 100644 --- a/airflow/providers/celery/sensors/celery_queue.py +++ b/airflow/providers/celery/sensors/celery_queue.py @@ -37,13 +37,12 @@ class CeleryQueueSensor(BaseSensorOperator): """ @apply_defaults def __init__( - self, + self, *, celery_queue: str, target_task_id: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.celery_queue = celery_queue self.target_task_id = target_task_id diff --git a/airflow/providers/cncf/kubernetes/example_dags/example_kubernetes.py b/airflow/providers/cncf/kubernetes/example_dags/example_kubernetes.py index 3e2f42473b9d9..17f8e56970d42 100644 --- a/airflow/providers/cncf/kubernetes/example_dags/example_kubernetes.py +++ b/airflow/providers/cncf/kubernetes/example_dags/example_kubernetes.py @@ -121,13 +121,13 @@ default_args = { 'owner': 'airflow', - 'start_date': days_ago(2) } with DAG( dag_id='example_kubernetes_operator', default_args=default_args, schedule_interval=None, + start_date=days_ago(2), tags=['example'], ) as dag: k = KubernetesPodOperator( diff --git a/airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes.py b/airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes.py index 4572ec3697ba5..c88b73bf78216 100644 --- a/airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes.py +++ b/airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes.py @@ -43,7 +43,6 @@ default_args = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': days_ago(1), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False, @@ -58,6 +57,7 @@ default_args=default_args, description='submit spark-pi as sparkApplication on kubernetes', schedule_interval=timedelta(days=1), + start_date=days_ago(1), ) t1 = SparkKubernetesOperator( diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py 
b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py index d6ed8c16b4d41..63f3f97f5b1c3 100644 --- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py @@ -45,7 +45,8 @@ class KubernetesPodOperator(BaseOperator): # pylint: disable=too-many-instance- :ref:`howto/operator:KubernetesPodOperator` .. note:: - If you use `Google Kubernetes Engine `__, use + If you use `Google Kubernetes Engine `__ + and Airflow is not running in the same cluster, consider using :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator`, which simplifies the authorization process. @@ -146,10 +147,12 @@ class KubernetesPodOperator(BaseOperator): # pylint: disable=too-many-instance- :param priority_class_name: priority class name for the launched Pod :type priority_class_name: str """ - template_fields: Iterable[str] = ('cmds', 'arguments', 'env_vars', 'config_file', 'pod_template_file') + template_fields: Iterable[str] = ( + 'image', 'cmds', 'arguments', 'env_vars', 'config_file', 'pod_template_file') @apply_defaults def __init__(self, # pylint: disable=too-many-arguments,too-many-locals + *, namespace: Optional[str] = None, image: Optional[str] = None, name: Optional[str] = None, @@ -286,9 +289,11 @@ def execute(self, context) -> Optional[str]: self.reattach_on_restart: self.log.info("found a running pod with labels %s but a different try_number" "Will attach to this pod and monitor instead of starting new one", labels) - final_state, _, result = self.create_new_pod_for_operator(labels, launcher) + final_state, result = self.monitor_launched_pod(launcher, pod_list.items[0]) elif len(pod_list.items) == 1: - final_state, result = self.monitor_launched_pod(launcher, pod_list[0]) + self.log.info("found a running pod with labels %s." 
+ "Will monitor this pod instead of starting new one", labels) + final_state, result = self.monitor_launched_pod(launcher, pod_list.items[0]) else: self.log.info("creating pod with labels %s and launcher %s", labels, launcher) final_state, _, result = self.create_new_pod_for_operator(labels, launcher) diff --git a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py index 504980b2a76da..80f980d6459ee 100644 --- a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +++ b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py @@ -43,12 +43,12 @@ class SparkKubernetesOperator(BaseOperator): ui_color = '#f4a460' @apply_defaults - def __init__(self, + def __init__(self, *, application_file: str, namespace: Optional[str] = None, kubernetes_conn_id: str = 'kubernetes_default', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.application_file = application_file self.namespace = namespace self.kubernetes_conn_id = kubernetes_conn_id diff --git a/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py b/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py index e6b6ac1fe2f93..934be58361cfc 100644 --- a/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py +++ b/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py @@ -44,12 +44,12 @@ class SparkKubernetesSensor(BaseSensorOperator): SUCCESS_STATES = ('COMPLETED',) @apply_defaults - def __init__(self, + def __init__(self, *, application_name: str, namespace: Optional[str] = None, kubernetes_conn_id: str = 'kubernetes_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.application_name = application_name self.namespace = namespace self.kubernetes_conn_id = kubernetes_conn_id diff --git a/airflow/providers/databricks/example_dags/example_databricks.py 
b/airflow/providers/databricks/example_dags/example_databricks.py index 73f3fbf34acf3..55e5c53e915b3 100644 --- a/airflow/providers/databricks/example_dags/example_databricks.py +++ b/airflow/providers/databricks/example_dags/example_databricks.py @@ -39,13 +39,13 @@ 'owner': 'airflow', 'email': ['airflow@example.com'], 'depends_on_past': False, - 'start_date': days_ago(2) } with DAG( dag_id='example_databricks_operator', default_args=default_args, schedule_interval='@daily', + start_date=days_ago(2), tags=['example'], ) as dag: new_cluster = { diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py index 99e81ef74df24..3bc8f5a67d2f9 100644 --- a/airflow/providers/databricks/operators/databricks.py +++ b/airflow/providers/databricks/operators/databricks.py @@ -245,7 +245,7 @@ class DatabricksSubmitRunOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, + self, *, json=None, spark_jar_task=None, notebook_task=None, @@ -457,7 +457,7 @@ class DatabricksRunNowOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, + self, *, job_id=None, json=None, notebook_params=None, diff --git a/airflow/providers/datadog/sensors/datadog.py b/airflow/providers/datadog/sensors/datadog.py index a24f46e77a844..29969d977f1cc 100644 --- a/airflow/providers/datadog/sensors/datadog.py +++ b/airflow/providers/datadog/sensors/datadog.py @@ -40,7 +40,7 @@ class DatadogSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, datadog_conn_id: str = 'datadog_default', from_seconds_ago: int = 3600, up_to_seconds_from_now: int = 0, @@ -48,9 +48,8 @@ def __init__( sources: Optional[str] = None, tags: Optional[List[str]] = None, response_check: Optional[Callable[[Dict[str, Any]], bool]] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.datadog_conn_id = datadog_conn_id 
self.from_seconds_ago = from_seconds_ago self.up_to_seconds_from_now = up_to_seconds_from_now diff --git a/airflow/providers/dingding/example_dags/example_dingding.py b/airflow/providers/dingding/example_dags/example_dingding.py index 04757784455e0..f2b483b7e4626 100644 --- a/airflow/providers/dingding/example_dags/example_dingding.py +++ b/airflow/providers/dingding/example_dags/example_dingding.py @@ -27,7 +27,6 @@ args = { 'owner': 'airflow', 'retries': 3, - 'start_date': days_ago(2) } @@ -63,6 +62,7 @@ def failure_callback(context): default_args=args, schedule_interval='@once', dagrun_timeout=timedelta(minutes=60), + start_date=days_ago(2), tags=['example'], ) as dag: diff --git a/airflow/providers/dingding/operators/dingding.py b/airflow/providers/dingding/operators/dingding.py index 40f9872c53d43..0d1ba6687ca94 100644 --- a/airflow/providers/dingding/operators/dingding.py +++ b/airflow/providers/dingding/operators/dingding.py @@ -46,15 +46,14 @@ class DingdingOperator(BaseOperator): ui_color = '#4ea4d4' # Dingding icon color @apply_defaults - def __init__(self, + def __init__(self, *, dingding_conn_id='dingding_default', message_type='text', message=None, at_mobiles=None, at_all=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dingding_conn_id = dingding_conn_id self.message_type = message_type self.message = message diff --git a/airflow/providers/discord/operators/discord_webhook.py b/airflow/providers/discord/operators/discord_webhook.py index b51f51460caae..6b97920e2ea20 100644 --- a/airflow/providers/discord/operators/discord_webhook.py +++ b/airflow/providers/discord/operators/discord_webhook.py @@ -57,7 +57,7 @@ class DiscordWebhookOperator(SimpleHttpOperator): template_fields = ['username', 'message'] @apply_defaults - def __init__(self, + def __init__(self, *, http_conn_id: Optional[str] = None, webhook_endpoint: Optional[str] = None, message: str = "", diff --git 
a/airflow/providers/docker/example_dags/example_docker.py b/airflow/providers/docker/example_dags/example_docker.py index 357ce5abb9bdf..627f6300a8093 100644 --- a/airflow/providers/docker/example_dags/example_docker.py +++ b/airflow/providers/docker/example_dags/example_docker.py @@ -25,7 +25,6 @@ default_args = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': days_ago(2), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False, @@ -34,7 +33,11 @@ } dag = DAG( - 'docker_sample', default_args=default_args, schedule_interval=timedelta(minutes=10)) + 'docker_sample', + default_args=default_args, + schedule_interval=timedelta(minutes=10), + start_date=days_ago(2), +) t1 = BashOperator( task_id='print_date', diff --git a/airflow/providers/docker/example_dags/example_docker_copy_data.py b/airflow/providers/docker/example_dags/example_docker_copy_data.py index 0ae47eaf61474..a96ed926bf134 100644 --- a/airflow/providers/docker/example_dags/example_docker_copy_data.py +++ b/airflow/providers/docker/example_dags/example_docker_copy_data.py @@ -36,7 +36,6 @@ default_args = { "owner": "airflow", "depends_on_past": False, - "start_date": days_ago(2), "email": ["airflow@example.com"], "email_on_failure": False, "email_on_retry": False, @@ -44,7 +43,12 @@ "retry_delay": timedelta(minutes=5), } -dag = DAG("docker_sample_copy_data", default_args=default_args, schedule_interval=timedelta(minutes=10)) +dag = DAG( + "docker_sample_copy_data", + default_args=default_args, + schedule_interval=timedelta(minutes=10), + start_date=days_ago(2), +) locate_file_cmd = """ sleep 10 diff --git a/airflow/providers/docker/example_dags/example_docker_swarm.py b/airflow/providers/docker/example_dags/example_docker_swarm.py index d7174fee2ea79..7dc056edad0bb 100644 --- a/airflow/providers/docker/example_dags/example_docker_swarm.py +++ b/airflow/providers/docker/example_dags/example_docker_swarm.py @@ -24,7 +24,6 @@ default_args = { 'owner': 'airflow', 
'depends_on_past': False, - 'start_date': days_ago(1), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False @@ -34,6 +33,7 @@ 'docker_swarm_sample', default_args=default_args, schedule_interval=timedelta(minutes=10), + start_date=days_ago(1), catchup=False ) diff --git a/airflow/providers/docker/hooks/docker.py b/airflow/providers/docker/hooks/docker.py index f84991d6ea6fc..bae0e7f5a046f 100644 --- a/airflow/providers/docker/hooks/docker.py +++ b/airflow/providers/docker/hooks/docker.py @@ -15,6 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from typing import Optional from docker import APIClient from docker.errors import APIError @@ -34,10 +35,10 @@ class DockerHook(BaseHook, LoggingMixin): """ def __init__(self, docker_conn_id='docker_default', - base_url=None, - version=None, - tls=None - ): + base_url: Optional[str] = None, + version: Optional[str] = None, + tls: Optional[str] = None + ) -> None: super().__init__() if not base_url: raise AirflowException('No Docker base URL provided') @@ -63,7 +64,7 @@ def __init__(self, self.__email = extra_options.get('email') self.__reauth = extra_options.get('reauth') != 'no' - def get_conn(self): + def get_conn(self) -> APIClient: client = APIClient( base_url=self.__base_url, version=self.__version, @@ -72,7 +73,7 @@ def get_conn(self): self.__login(client) return client - def __login(self, client): + def __login(self, client) -> None: self.log.debug('Logging into Docker registry') try: client.login( diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py index 40d601ca20af7..d7be1a7f4c6fc 100644 --- a/airflow/providers/docker/operators/docker.py +++ b/airflow/providers/docker/operators/docker.py @@ -132,7 +132,7 @@ class DockerOperator(BaseOperator): # pylint: disable=too-many-arguments,too-many-locals @apply_defaults def __init__( - self, + 
self, *, image: str, api_version: Optional[str] = None, command: Optional[Union[str, List[str]]] = None, @@ -152,7 +152,7 @@ def __init__( tls_ssl_version: Optional[str] = None, tmp_dir: str = '/tmp/airflow', user: Optional[Union[str, int]] = None, - volumes: Optional[Iterable[str]] = None, + volumes: Optional[List[str]] = None, working_dir: Optional[str] = None, xcom_all: bool = False, docker_conn_id: Optional[str] = None, @@ -162,10 +162,9 @@ def __init__( shm_size: Optional[int] = None, tty: Optional[bool] = False, cap_add: Optional[Iterable[str]] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.api_version = api_version self.auto_remove = auto_remove self.command = command @@ -214,7 +213,7 @@ def get_hook(self) -> DockerHook: tls=self.__get_tls_config() ) - def _run_image(self): + def _run_image(self) -> Optional[str]: """ Run a Docker container with the provided image """ @@ -223,6 +222,8 @@ def _run_image(self): with TemporaryDirectory(prefix='airflowtmp', dir=self.host_tmp_dir) as host_tmp_dir: self.volumes.append('{0}:{1}'.format(host_tmp_dir, self.tmp_dir)) + if not self.cli: + raise Exception("The 'cli' should be initialized before!") self.container = self.cli.create_container( command=self.get_command(), name=self.container_name, @@ -254,6 +255,7 @@ def _run_image(self): for line in lines: line = line.strip() if hasattr(line, 'decode'): + # Note that lines returned can also be byte sequences so we have to handle decode here line = line.decode('utf-8') self.log.info(line) @@ -272,8 +274,10 @@ def _run_image(self): return ret - def execute(self, context): + def execute(self, context) -> Optional[str]: self.cli = self._get_cli() + if not self.cli: + raise Exception("The 'cli' should be initialized before!") # Pull the docker image if `force_pull` is set or image does not exist locally if self.force_pull or not self.cli.images(name=self.image): @@ -285,10 +289,9 @@ def execute(self, context): 
self.log.info("%s", output['status']) self.environment['AIRFLOW_TMP_DIR'] = self.tmp_dir - return self._run_image() - def _get_cli(self): + def _get_cli(self) -> APIClient: if self.docker_conn_id: return self.get_hook().get_conn() else: @@ -299,7 +302,7 @@ def _get_cli(self): tls=tls_config ) - def get_command(self): + def get_command(self) -> Union[List[str], str]: """ Retrieve command(s). if command string starts with [, it returns the command list) @@ -312,12 +315,12 @@ def get_command(self): commands = self.command return commands - def on_kill(self): + def on_kill(self) -> None: if self.cli is not None: self.log.info('Stopping docker container') self.cli.stop(self.container['Id']) - def __get_tls_config(self): + def __get_tls_config(self) -> Optional[tls.TLSConfig]: tls_config = None if self.tls_ca_cert and self.tls_client_cert and self.tls_client_key: # Ignore type error on SSL version here - it is deprecated and type annotation is wrong diff --git a/airflow/providers/docker/operators/docker_swarm.py b/airflow/providers/docker/operators/docker_swarm.py index ea83c5c37a405..904fee2bb0fcb 100644 --- a/airflow/providers/docker/operators/docker_swarm.py +++ b/airflow/providers/docker/operators/docker_swarm.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
"""Run ephemeral Docker Swarm services""" +from typing import Optional import requests from docker import types @@ -98,25 +99,26 @@ class DockerSwarmOperator(DockerOperator): @apply_defaults def __init__( self, - image, - enable_logging=True, - *args, - **kwargs): - super().__init__(image=image, *args, **kwargs) + *, + image: str, + enable_logging: bool = True, + **kwargs) -> None: + super().__init__(image=image, **kwargs) self.enable_logging = enable_logging self.service = None - def execute(self, context): + def execute(self, context) -> None: self.cli = self._get_cli() self.environment['AIRFLOW_TMP_DIR'] = self.tmp_dir return self._run_service() - def _run_service(self): + def _run_service(self) -> None: self.log.info('Starting docker service from image %s', self.image) - + if not self.cli: + raise Exception("The 'cli' should be initialized before!") self.service = self.cli.create_service( types.TaskTemplate( container_spec=types.ContainerSpec( @@ -148,20 +150,28 @@ def _run_service(self): break if self.auto_remove: + if not self.service: + raise Exception("The 'service' should be initialized before!") self.cli.remove_service(self.service['ID']) if self._service_status() == 'failed': raise AirflowException('Service failed: ' + repr(self.service)) - def _service_status(self): + def _service_status(self) -> Optional[str]: + if not self.cli: + raise Exception("The 'cli' should be initialized before!") return self.cli.tasks( filters={'service': self.service['ID']} )[0]['Status']['State'] - def _has_service_terminated(self): + def _has_service_terminated(self) -> bool: status = self._service_status() - return (status in ['failed', 'complete']) + return status in ['failed', 'complete'] - def _stream_logs_to_output(self): + def _stream_logs_to_output(self) -> None: + if not self.cli: + raise Exception("The 'cli' should be initialized before!") + if not self.service: + raise Exception("The 'service' should be initialized before!") logs = self.cli.service_logs( 
self.service['ID'], follow=True, stdout=True, stderr=True, is_tty=self.tty ) @@ -192,7 +202,7 @@ def _stream_logs_to_output(self): if line: self.log.info(line) - def on_kill(self): + def on_kill(self) -> None: if self.cli is not None: self.log.info('Removing docker service: %s', self.service['ID']) self.cli.remove_service(self.service['ID']) diff --git a/airflow/providers/email/PROVIDERS_CHANGES_2020.06.24.md b/airflow/providers/email/PROVIDERS_CHANGES_2020.06.24.md deleted file mode 100644 index 99caf3c9ccfc8..0000000000000 --- a/airflow/providers/email/PROVIDERS_CHANGES_2020.06.24.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/email/README.md b/airflow/providers/email/README.md deleted file mode 100644 index 212c8a538c4ab..0000000000000 --- a/airflow/providers/email/README.md +++ /dev/null @@ -1,103 +0,0 @@ - - - -# Package apache-airflow-backport-providers-email - -Release: 2020.6.24 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Compatibility](#compatibility) -- [Provider class summary](#provider-class-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) -- [Releases](#releases) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `email` provider. 
All classes for this provider package -are in `airflow.providers.email` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-email` - -## Compatibility - -For full compatibility and test status of the backport packages check -[Airflow Backport Package Compatibility](https://cwiki.apache.org/confluence/display/AIRFLOW/Backported+providers+packages+for+Airflow+1.10.*+series) - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `email` provider -are in the `airflow.providers.email` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.email` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------| -| [operators.email.EmailOperator](https://github.com/apache/airflow/blob/master/airflow/providers/email/operators/email.py) | [operators.email_operator.EmailOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/email_operator.py) | - - - - - - - - - - - -## Releases - -### Release 2020.6.24 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/exasol/operators/exasol.py b/airflow/providers/exasol/operators/exasol.py index 090f1b989e513..e4ac6b6d6976e 100644 --- a/airflow/providers/exasol/operators/exasol.py +++ b/airflow/providers/exasol/operators/exasol.py @@ -47,14 +47,14 @@ class ExasolOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, sql: str, exasol_conn_id: str = 'exasol_default', autocommit: bool = False, parameters: Optional[Mapping] = None, schema: Optional[str] = None, - *args, **kwargs): - super(ExasolOperator, self).__init__(*args, **kwargs) + **kwargs): + super(ExasolOperator, self).__init__(**kwargs) self.exasol_conn_id = exasol_conn_id self.sql = sql self.autocommit = autocommit diff --git 
a/airflow/providers/ftp/sensors/ftp.py b/airflow/providers/ftp/sensors/ftp.py index a36e558584b23..878b7e33b4841 100644 --- a/airflow/providers/ftp/sensors/ftp.py +++ b/airflow/providers/ftp/sensors/ftp.py @@ -45,13 +45,12 @@ class FTPSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, path: str, ftp_conn_id: str = 'ftp_default', fail_on_transient_errors: bool = True, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.path = path self.ftp_conn_id = ftp_conn_id diff --git a/airflow/providers/google/ads/example_dags/example_ads.py b/airflow/providers/google/ads/example_dags/example_ads.py index f1682e78958a1..43bdcd34f6c93 100644 --- a/airflow/providers/google/ads/example_dags/example_ads.py +++ b/airflow/providers/google/ads/example_dags/example_ads.py @@ -64,12 +64,10 @@ # [END howto_google_ads_env_variables] -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "example_google_ads", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1), ) as dag: # [START howto_google_ads_to_gcs_operator] run_operator = GoogleAdsToGcsOperator( diff --git a/airflow/providers/google/ads/operators/ads.py b/airflow/providers/google/ads/operators/ads.py index 950dc1f2095d3..e47d50951a3b7 100644 --- a/airflow/providers/google/ads/operators/ads.py +++ b/airflow/providers/google/ads/operators/ads.py @@ -63,16 +63,15 @@ class GoogleAdsListAccountsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, bucket: str, object_name: str, gcp_conn_id: str = "google_cloud_default", google_ads_conn_id: str = "google_ads_default", gzip: bool = False, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket = bucket self.object_name = object_name self.gcp_conn_id = gcp_conn_id diff --git a/airflow/providers/google/ads/transfers/ads_to_gcs.py 
b/airflow/providers/google/ads/transfers/ads_to_gcs.py index 90a57b2713043..413c3b8aa9efd 100644 --- a/airflow/providers/google/ads/transfers/ads_to_gcs.py +++ b/airflow/providers/google/ads/transfers/ads_to_gcs.py @@ -64,7 +64,7 @@ class GoogleAdsToGcsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, client_ids: List[str], query: str, attributes: List[str], @@ -74,10 +74,9 @@ def __init__( google_ads_conn_id: str = "google_ads_default", page_size: int = 10000, gzip: bool = False, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.client_ids = client_ids self.query = query self.attributes = attributes diff --git a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_classification.py b/airflow/providers/google/cloud/example_dags/example_automl_nl_text_classification.py index b7fc05d788581..6648b149a902a 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_classification.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_nl_text_classification.py @@ -53,14 +53,13 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_TEXT_CLS_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = CloudAutoMLHook.extract_object_id # Example DAG for AutoML Natural Language Text Classification with models.DAG( "example_automl_text_cls", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as example_dag: create_dataset_task = AutoMLCreateDatasetOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_extraction.py b/airflow/providers/google/cloud/example_dags/example_automl_nl_text_extraction.py index 7d1e8f2d81adc..0be33e6ed76c4 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_extraction.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_nl_text_extraction.py @@ -50,14 
+50,13 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_TEXT_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = CloudAutoMLHook.extract_object_id # Example DAG for AutoML Natural Language Entities Extraction with models.DAG( "example_automl_text", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={"extract_object_id": extract_object_id}, tags=['example'], ) as example_dag: diff --git a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_sentiment.py b/airflow/providers/google/cloud/example_dags/example_automl_nl_text_sentiment.py index 31c6a9797489d..98a99b3f576ad 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_sentiment.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_nl_text_sentiment.py @@ -53,14 +53,13 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_SENTIMENT_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = CloudAutoMLHook.extract_object_id # Example DAG for AutoML Natural Language Text Sentiment with models.DAG( "example_automl_text_sentiment", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={"extract_object_id": extract_object_id}, tags=['example'], ) as example_dag: diff --git a/airflow/providers/google/cloud/example_dags/example_automl_tables.py b/airflow/providers/google/cloud/example_dags/example_automl_tables.py index 78ecd10a18721..11d4049fffc05 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_tables.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_tables.py @@ -59,7 +59,6 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_DATASET_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = CloudAutoMLHook.extract_object_id @@ -76,8 +75,8 @@ def 
get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: # Example DAG to create dataset, train model_id and deploy it. with models.DAG( "example_create_and_deploy", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={ "get_target_column_spec": get_target_column_spec, "target": TARGET, @@ -184,8 +183,8 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: # Example DAG for AutoML datasets operations with models.DAG( "example_automl_dataset", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={"extract_object_id": extract_object_id}, ) as example_dag: create_dataset_task = AutoMLCreateDatasetOperator( @@ -249,8 +248,9 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: with models.DAG( "example_gcp_get_deploy", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), + tags=["example"], ) as get_deploy_dag: # [START howto_operator_get_model] get_model_task = AutoMLGetModelOperator( @@ -273,8 +273,9 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: with models.DAG( "example_gcp_predict", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), + tags=["example"], ) as predict_dag: # [START howto_operator_prediction] predict_task = AutoMLPredictOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_automl_translation.py b/airflow/providers/google/cloud/example_dags/example_automl_translation.py index a031936e7a080..87e5265dac8ab 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_translation.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_translation.py @@ -56,15 +56,14 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": 
[GCP_AUTOML_TRANSLATION_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = CloudAutoMLHook.extract_object_id # Example DAG for AutoML Translation with models.DAG( "example_automl_translation", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={"extract_object_id": extract_object_id}, tags=['example'], ) as example_dag: diff --git a/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_classification.py b/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_classification.py index efcd7f775c88a..2f91223749d44 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_classification.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_classification.py @@ -53,15 +53,14 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_VIDEO_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = CloudAutoMLHook.extract_object_id # Example DAG for AutoML Video Intelligence Classification with models.DAG( "example_automl_video", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={"extract_object_id": extract_object_id}, tags=['example'], ) as example_dag: diff --git a/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_tracking.py b/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_tracking.py index e471724e13193..257a450159918 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_tracking.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_tracking.py @@ -54,15 +54,14 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_TRACKING_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = 
CloudAutoMLHook.extract_object_id # Example DAG for AutoML Video Intelligence Object Tracking with models.DAG( "example_automl_video_tracking", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={"extract_object_id": extract_object_id}, tags=['example'], ) as example_dag: diff --git a/airflow/providers/google/cloud/example_dags/example_automl_vision_classification.py b/airflow/providers/google/cloud/example_dags/example_automl_vision_classification.py index d8052e642a99c..48d14aca03c90 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_vision_classification.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_vision_classification.py @@ -53,15 +53,14 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_VISION_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = CloudAutoMLHook.extract_object_id # Example DAG for AutoML Vision Classification with models.DAG( "example_automl_vision", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={"extract_object_id": extract_object_id}, tags=['example'], ) as example_dag: diff --git a/airflow/providers/google/cloud/example_dags/example_automl_vision_object_detection.py b/airflow/providers/google/cloud/example_dags/example_automl_vision_object_detection.py index b999cfb1535f2..77bac055a6508 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_vision_object_detection.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_vision_object_detection.py @@ -53,15 +53,14 @@ IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_DETECTION_BUCKET]}} -default_args = {"start_date": days_ago(1)} extract_object_id = CloudAutoMLHook.extract_object_id # Example DAG for AutoML Vision Object Detection with models.DAG( "example_automl_vision_detection", - default_args=default_args, 
schedule_interval=None, # Override to match your needs + start_date=days_ago(1), user_defined_macros={"extract_object_id": extract_object_id}, tags=['example'], ) as example_dag: diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py index d8866a6c74ab5..7e39a55f5671d 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py @@ -69,12 +69,10 @@ # [END howto_bigquery_dts_create_args] -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_gcp_bigquery_dts", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_bigquery_create_data_transfer] diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py b/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py index 1bdb4910c0d69..8a110e0924ff8 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py @@ -33,8 +33,6 @@ ) from airflow.utils.dates import days_ago -default_args = {"start_date": days_ago(1)} - PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project") BQ_LOCATION = "europe-north1" @@ -52,8 +50,8 @@ with models.DAG( "example_bigquery_operations", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=["example"], ) as dag: # [START howto_operator_bigquery_create_table] @@ -176,8 +174,8 @@ with models.DAG( "example_bigquery_operations_location", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=["example"], ): create_dataset_with_location = BigQueryCreateEmptyDatasetOperator( diff --git 
a/airflow/providers/google/cloud/example_dags/example_bigquery_queries.py b/airflow/providers/google/cloud/example_dags/example_bigquery_queries.py index e17a53dceecaf..e0259cb70bdef 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_queries.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_queries.py @@ -54,15 +54,13 @@ {"name": "ds", "type": "DATE", "mode": "NULLABLE"}, ] -default_args = {"start_date": days_ago(1)} - for location in [None, LOCATION]: dag_id = "example_bigquery_queries_location" if location else "example_bigquery_queries" with models.DAG( dag_id, - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=["example"], user_defined_macros={"DATASET": DATASET_NAME, "TABLE": TABLE_1} ) as dag_with_locations: diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py b/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py index 3171d3062cf30..46115feda3987 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py @@ -33,12 +33,10 @@ ORIGIN = "origin" TARGET = "target" -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_bigquery_to_bigquery", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=["example"], ) as dag: copy_selected_data = BigQueryToBigQueryOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py index 221138f4a28e9..b5b15f016895c 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py @@ -35,12 +35,10 @@ ) TABLE = "table_42" -default_args = {"start_date": days_ago(1)} - with models.DAG( 
"example_bigquery_to_gcs", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=["example"], ) as dag: bigquery_to_gcs = BigQueryToGCSOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py b/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py index 4667919190c09..513195e82a1da 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py @@ -37,12 +37,10 @@ ORIGIN = "origin" TARGET = "target" -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_bigquery_transfer", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=["example"], ) as dag: copy_selected_data = BigQueryToBigQueryOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_bigtable.py b/airflow/providers/google/cloud/example_dags/example_bigtable.py index 8210f9831a66e..b51b6ffd4ab80 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigtable.py +++ b/airflow/providers/google/cloud/example_dags/example_bigtable.py @@ -69,14 +69,11 @@ CBT_TABLE_ID = getenv('CBT_TABLE_ID', 'some-table-id') CBT_POKE_INTERVAL = getenv('CBT_POKE_INTERVAL', '60') -default_args = { - 'start_date': days_ago(1) -} with models.DAG( 'example_gcp_bigtable_operators', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_operator_gcp_bigtable_instance_create] diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py b/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py index 55768ca9f9722..5dbfb1805b228 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +++ 
b/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py @@ -53,12 +53,10 @@ SECOND_INSTANCE = {"tier": Instance.Tier.STANDARD_HA, "memory_size_gb": 3} -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "gcp_cloud_memorystore", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1), tags=['example'], ) as dag: # [START howto_operator_create_instance] diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_sql.py b/airflow/providers/google/cloud/example_dags/example_cloud_sql.py index 637f05aa32e3b..c2f52e5777221 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_sql.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_sql.py @@ -168,14 +168,10 @@ } # [END howto_operator_cloudsql_db_patch_body] -default_args = { - 'start_date': days_ago(1) -} - with models.DAG( 'example_gcp_sql', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # ############################################## # diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py b/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py index 27d85e2f56fe7..94fed1dddec9c 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py @@ -87,10 +87,6 @@ 'DROP TABLE TABLE_TEST2', ] -default_args = { - 'start_date': days_ago(1) -} - # [START howto_operator_cloudsql_query_connections] @@ -272,8 +268,8 @@ def get_absolute_path(path): with models.DAG( dag_id='example_gcp_sql_query', - default_args=default_args, schedule_interval=None, + start_date=days_ago(1), tags=['example'], ) as dag: prev_task = None diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py 
b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py index 019adc49598f2..a994f6b632210 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py @@ -92,13 +92,13 @@ # [START howto_operator_gcp_transfer_default_args] -default_args = {'start_date': days_ago(1)} +default_args = {'owner': 'airflow'} # [END howto_operator_gcp_transfer_default_args] with models.DAG( 'example_gcp_transfer_aws', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py index 0888c54ec0eba..1df2541a11e27 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py @@ -82,12 +82,10 @@ } # [END howto_operator_gcp_transfer_update_job_body] -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_gcp_transfer", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=["example"], ) as dag: diff --git a/airflow/providers/google/cloud/example_dags/example_compute.py b/airflow/providers/google/cloud/example_dags/example_compute.py index 330fcaaded0e1..3153715c01e30 100644 --- a/airflow/providers/google/cloud/example_dags/example_compute.py +++ b/airflow/providers/google/cloud/example_dags/example_compute.py @@ -44,17 +44,14 @@ GCE_INSTANCE = os.environ.get('GCE_INSTANCE', 'testinstance') # [END howto_operator_gce_args_common] -default_args = { - 'start_date': days_ago(1), -} GCE_SHORT_MACHINE_TYPE_NAME = 
os.environ.get('GCE_SHORT_MACHINE_TYPE_NAME', 'n1-standard-1') with models.DAG( 'example_gcp_compute', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_operator_gce_start] diff --git a/airflow/providers/google/cloud/example_dags/example_compute_igm.py b/airflow/providers/google/cloud/example_dags/example_compute_igm.py index 422761984880a..f091af1f90765 100644 --- a/airflow/providers/google/cloud/example_dags/example_compute_igm.py +++ b/airflow/providers/google/cloud/example_dags/example_compute_igm.py @@ -49,10 +49,6 @@ GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project') GCE_ZONE = os.environ.get('GCE_ZONE', 'europe-west1-b') -default_args = { - 'start_date': days_ago(1) -} - # [START howto_operator_compute_template_copy_args] GCE_TEMPLATE_NAME = os.environ.get('GCE_TEMPLATE_NAME', 'instance-template-test') GCE_NEW_TEMPLATE_NAME = os.environ.get('GCE_NEW_TEMPLATE_NAME', @@ -95,8 +91,8 @@ with models.DAG( 'example_gcp_compute_igm', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_operator_gce_igm_copy_template] diff --git a/airflow/providers/google/cloud/example_dags/example_datacatalog.py b/airflow/providers/google/cloud/example_dags/example_datacatalog.py index 4f4ec09e2bd37..4a2d13a42cfb8 100644 --- a/airflow/providers/google/cloud/example_dags/example_datacatalog.py +++ b/airflow/providers/google/cloud/example_dags/example_datacatalog.py @@ -39,8 +39,6 @@ from airflow.utils.dates import days_ago from airflow.utils.helpers import chain -default_args = {"start_date": days_ago(1)} - PROJECT_ID = "polidea-airflow" LOCATION = "us-central1" ENTRY_GROUP_ID = "important_data_jan_2019" @@ -50,7 +48,7 @@ FIELD_NAME_2 = "second" FIELD_NAME_3 = "first-rename" -with models.DAG("example_gcp_datacatalog", default_args=default_args, 
schedule_interval=None) as dag: +with models.DAG("example_gcp_datacatalog", start_date=days_ago(1), schedule_interval=None) as dag: # Create # [START howto_operator_gcp_datacatalog_create_entry_group] create_entry_group = CloudDataCatalogCreateEntryGroupOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_dataflow.py b/airflow/providers/google/cloud/example_dags/example_dataflow.py index c9034cf45fc8b..d5d82d66a8778 100644 --- a/airflow/providers/google/cloud/example_dags/example_dataflow.py +++ b/airflow/providers/google/cloud/example_dags/example_dataflow.py @@ -41,7 +41,6 @@ GCS_JAR_OBJECT_NAME = GCS_JAR_PARTS.path[1:] default_args = { - "start_date": days_ago(1), 'dataflow_default_options': { 'tempLocation': GCS_TMP, 'stagingLocation': GCS_STAGING, @@ -50,8 +49,8 @@ with models.DAG( "example_gcp_dataflow_native_java", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag_native_java: @@ -93,6 +92,7 @@ with models.DAG( "example_gcp_dataflow_native_python", default_args=default_args, + start_date=days_ago(1), schedule_interval=None, # Override to match your needs tags=['example'], ) as dag_native_python: @@ -133,6 +133,7 @@ with models.DAG( "example_gcp_dataflow_template", default_args=default_args, + start_date=days_ago(1), schedule_interval=None, # Override to match your needs tags=['example'], ) as dag_template: diff --git a/airflow/providers/google/cloud/example_dags/example_datafusion.py b/airflow/providers/google/cloud/example_dags/example_datafusion.py index 62ab1d46fdaf8..60aa8f17bb717 100644 --- a/airflow/providers/google/cloud/example_dags/example_datafusion.py +++ b/airflow/providers/google/cloud/example_dags/example_datafusion.py @@ -133,12 +133,11 @@ } # [END howto_data_fusion_env_variables] -default_args = {"start_date": dates.days_ago(1)} with models.DAG( "example_data_fusion", - default_args=default_args, schedule_interval=None, # Override to 
match your needs + start_date=dates.days_ago(1) ) as dag: # [START howto_cloud_data_fusion_create_instance_operator] create_instance = CloudDataFusionCreateInstanceOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_dataproc.py b/airflow/providers/google/cloud/example_dags/example_dataproc.py index 55dbff5d079ce..7dedeebced625 100644 --- a/airflow/providers/google/cloud/example_dags/example_dataproc.py +++ b/airflow/providers/google/cloud/example_dags/example_dataproc.py @@ -146,7 +146,7 @@ with models.DAG( "example_gcp_dataproc", - default_args={"start_date": days_ago(1)}, + start_date=days_ago(1), schedule_interval=None, ) as dag: # [START how_to_cloud_dataproc_create_cluster_operator] diff --git a/airflow/providers/google/cloud/example_dags/example_datastore.py b/airflow/providers/google/cloud/example_dags/example_datastore.py index 618e443acdaa2..4129b53036b10 100644 --- a/airflow/providers/google/cloud/example_dags/example_datastore.py +++ b/airflow/providers/google/cloud/example_dags/example_datastore.py @@ -23,36 +23,135 @@ """ import os +from typing import Any, Dict from airflow import models from airflow.providers.google.cloud.operators.datastore import ( + CloudDatastoreAllocateIdsOperator, CloudDatastoreBeginTransactionOperator, CloudDatastoreCommitOperator, CloudDatastoreExportEntitiesOperator, CloudDatastoreImportEntitiesOperator, + CloudDatastoreRollbackOperator, CloudDatastoreRunQueryOperator, ) from airflow.utils import dates GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project") BUCKET = os.environ.get("GCP_DATASTORE_BUCKET", "datastore-system-test") -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "example_gcp_datastore", - default_args=default_args, schedule_interval=None, # Override to match your needs - tags=['example'], + start_date=dates.days_ago(1), + tags=["example"], ) as dag: + # [START how_to_export_task] export_task = CloudDatastoreExportEntitiesOperator( task_id="export_task", 
bucket=BUCKET, project_id=GCP_PROJECT_ID, overwrite_existing=True, ) + # [END how_to_export_task] + # [START how_to_import_task] import_task = CloudDatastoreImportEntitiesOperator( task_id="import_task", bucket="{{ task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[2] }}", file="{{ '/'.join(task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[3:]) }}", - project_id=GCP_PROJECT_ID + project_id=GCP_PROJECT_ID, ) + # [END how_to_import_task] export_task >> import_task + +# [START how_to_keys_def] +KEYS = [ + { + "partitionId": {"projectId": GCP_PROJECT_ID, "namespaceId": ""}, + "path": {"kind": "airflow"}, + } +] +# [END how_to_keys_def] + +# [START how_to_transaction_def] +TRANSACTION_OPTIONS: Dict[str, Any] = {"readWrite": {}} +# [END how_to_transaction_def] + +# [START how_to_commit_def] +COMMIT_BODY = { + "mode": "TRANSACTIONAL", + "mutations": [ + { + "insert": { + "key": KEYS[0], + "properties": {"string": {"stringValue": "airflow is awesome!"}}, + } + } + ], + "transaction": "{{ task_instance.xcom_pull('begin_transaction_commit') }}", +} +# [END how_to_commit_def] + +# [START how_to_query_def] +QUERY = { + "partitionId": {"projectId": GCP_PROJECT_ID, "namespaceId": ""}, + "readOptions": { + "transaction": "{{ task_instance.xcom_pull('begin_transaction_query') }}" + }, + "query": {}, +} +# [END how_to_query_def] + +with models.DAG( + "example_gcp_datastore_operations", + start_date=dates.days_ago(1), + schedule_interval=None, # Override to match your needs + tags=["example"], +) as dag2: + # [START how_to_allocate_ids] + allocate_ids = CloudDatastoreAllocateIdsOperator( + task_id="allocate_ids", partial_keys=KEYS, project_id=GCP_PROJECT_ID + ) + # [END how_to_allocate_ids] + + # [START how_to_begin_transaction] + begin_transaction_commit = CloudDatastoreBeginTransactionOperator( + task_id="begin_transaction_commit", + transaction_options=TRANSACTION_OPTIONS, + project_id=GCP_PROJECT_ID, + ) + # [END 
how_to_begin_transaction] + + # [START how_to_commit_task] + commit_task = CloudDatastoreCommitOperator( + task_id="commit_task", body=COMMIT_BODY, project_id=GCP_PROJECT_ID + ) + # [END how_to_commit_task] + + allocate_ids >> begin_transaction_commit >> commit_task + + begin_transaction_query = CloudDatastoreBeginTransactionOperator( + task_id="begin_transaction_query", + transaction_options=TRANSACTION_OPTIONS, + project_id=GCP_PROJECT_ID, + ) + + # [START how_to_run_query] + run_query = CloudDatastoreRunQueryOperator( + task_id="run_query", body=QUERY, project_id=GCP_PROJECT_ID + ) + # [END how_to_run_query] + + allocate_ids >> begin_transaction_query >> run_query + + begin_transaction_to_rollback = CloudDatastoreBeginTransactionOperator( + task_id="begin_transaction_to_rollback", + transaction_options=TRANSACTION_OPTIONS, + project_id=GCP_PROJECT_ID, + ) + + # [START how_to_rollback_transaction] + rollback_transaction = CloudDatastoreRollbackOperator( + task_id="rollback_transaction", + transaction="{{ task_instance.xcom_pull('begin_transaction_to_rollback') }}", + ) + begin_transaction_to_rollback >> rollback_transaction + # [END how_to_rollback_transaction] diff --git a/airflow/providers/google/cloud/example_dags/example_dlp.py b/airflow/providers/google/cloud/example_dags/example_dlp.py index 43abbef45832b..ddc0ca3cd4b5b 100644 --- a/airflow/providers/google/cloud/example_dags/example_dlp.py +++ b/airflow/providers/google/cloud/example_dags/example_dlp.py @@ -35,9 +35,6 @@ ) from airflow.utils.dates import days_ago -default_args = {"start_date": days_ago(1)} - - GCP_PROJECT = os.environ.get("GCP_PROJECT_ID", "example-project") TEMPLATE_ID = "dlp-inspect-838746" ITEM = ContentItem( @@ -54,8 +51,8 @@ with models.DAG( "example_gcp_dlp", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: create_template = CloudDLPCreateInspectTemplateOperator( diff --git 
a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py index 0bebd35a07019..656d73eaddde5 100644 --- a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py @@ -55,12 +55,10 @@ } # [END howto_FB_ADS_variables] -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_facebook_ads_to_gcs", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1) ) as dag: create_bucket = GCSCreateBucketOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_functions.py b/airflow/providers/google/cloud/example_dags/example_functions.py index 05e212cd0fb31..ed3dd4f17c4c2 100644 --- a/airflow/providers/google/cloud/example_dags/example_functions.py +++ b/airflow/providers/google/cloud/example_dags/example_functions.py @@ -78,7 +78,7 @@ # [START howto_operator_gcf_default_args] default_args = { - 'start_date': dates.days_ago(1) + 'owner': 'airflow' } # [END howto_operator_gcf_default_args] @@ -101,8 +101,8 @@ with models.DAG( 'example_gcp_function', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1), tags=['example'], ) as dag: # [START howto_operator_gcf_deploy] diff --git a/airflow/providers/google/cloud/example_dags/example_gcs.py b/airflow/providers/google/cloud/example_dags/example_gcs.py index 18f173f66edbe..d885addcbbcbc 100644 --- a/airflow/providers/google/cloud/example_dags/example_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_gcs.py @@ -34,8 +34,6 @@ from airflow.utils.dates import days_ago from airflow.utils.state import State -default_args = {"start_date": days_ago(1)} - PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-id") BUCKET_1 = os.environ.get("GCP_GCS_BUCKET_1", "test-gcs-example-bucket") 
GCS_ACL_ENTITY = os.environ.get("GCS_ACL_ENTITY", "allUsers") @@ -57,7 +55,7 @@ BUCKET_FILE_LOCATION = PATH_TO_UPLOAD_FILE.rpartition("/")[-1] with models.DAG( - "example_gcs", default_args=default_args, schedule_interval=None, tags=['example'], + "example_gcs", start_date=days_ago(1), schedule_interval=None, tags=['example'], ) as dag: create_bucket1 = GCSCreateBucketOperator( task_id="create_bucket1", bucket_name=BUCKET_1, project_id=PROJECT_ID diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py index 9035f2dcbaff9..2ef99a92bf175 100644 --- a/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py +++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py @@ -32,13 +32,11 @@ DATASET_NAME = os.environ.get("GCP_DATASET_NAME", 'airflow_test') TABLE_NAME = os.environ.get("GCP_TABLE_NAME", 'gcs_to_bq_table') -args = { - 'start_date': days_ago(2) -} - dag = models.DAG( - dag_id='example_gcs_to_bigquery_operator', default_args=args, - schedule_interval=None, tags=['example']) + dag_id='example_gcs_to_bigquery_operator', + start_date=days_ago(2), + schedule_interval=None, + tags=['example']) create_test_dataset = BigQueryCreateEmptyDatasetOperator( task_id='create_airflow_test_dataset', diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py index 20f0b2b28da4a..4dbbe72647b98 100644 --- a/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py @@ -26,8 +26,6 @@ from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.utils.dates import days_ago -default_args = {"start_date": days_ago(1)} - BUCKET_1_SRC = os.environ.get("GCP_GCS_BUCKET_1_SRC", "test-gcs-sync-1-src") BUCKET_1_DST = os.environ.get("GCP_GCS_BUCKET_1_DST", 
"test-gcs-sync-1-dst") @@ -41,7 +39,7 @@ OBJECT_2 = os.environ.get("GCP_GCS_OBJECT_2", "test-gcs-to-gcs-2") with models.DAG( - "example_gcs_to_gcs", default_args=default_args, schedule_interval=None, tags=['example'] + "example_gcs_to_gcs", start_date=days_ago(1), schedule_interval=None, tags=['example'] ) as dag: # [START howto_synch_bucket] sync_bucket = GCSSynchronizeBucketsOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py index d325e9c8e534a..c85d5878a3588 100644 --- a/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py +++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py @@ -25,8 +25,6 @@ from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator from airflow.utils.dates import days_ago -default_args = {"start_date": days_ago(1)} - BUCKET_SRC = os.environ.get("GCP_GCS_BUCKET_1_SRC", "test-gcs-sftp") OBJECT_SRC_1 = "parent-1.bin" OBJECT_SRC_2 = "parent-2.bin" @@ -36,7 +34,7 @@ with models.DAG( - "example_gcs_to_sftp", default_args=default_args, schedule_interval=None, tags=['example'] + "example_gcs_to_sftp", start_date=days_ago(1), schedule_interval=None, tags=['example'] ) as dag: # [START howto_operator_gcs_to_sftp_copy_single_file] copy_file_from_gcs_to_sftp = GCSToSFTPOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py b/airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py index a9c9c581de422..2d085b30da928 100644 --- a/airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py +++ b/airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py @@ -36,12 +36,10 @@ CLUSTER = {"name": CLUSTER_NAME, "initial_node_count": 1} # [END howto_operator_gcp_gke_create_cluster_definition] -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_gcp_gke", - default_args=default_args, 
schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_operator_gke_create_cluster] diff --git a/airflow/providers/google/cloud/example_dags/example_mlengine.py b/airflow/providers/google/cloud/example_dags/example_mlengine.py index f805340814f3d..1b83a1ba6102b 100644 --- a/airflow/providers/google/cloud/example_dags/example_mlengine.py +++ b/airflow/providers/google/cloud/example_dags/example_mlengine.py @@ -50,7 +50,6 @@ SUMMARY_STAGING = os.environ.get("GCP_MLENGINE_DATAFLOW_STAGING", "gs://test-airflow-mlengine/staging/") default_args = { - "start_date": days_ago(1), "params": { "model_name": MODEL_NAME } @@ -58,8 +57,8 @@ with models.DAG( "example_gcp_mlengine", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_operator_gcp_mlengine_training] diff --git a/airflow/providers/google/cloud/example_dags/example_natural_language.py b/airflow/providers/google/cloud/example_dags/example_natural_language.py index baa656356a09e..e8a26892f3a08 100644 --- a/airflow/providers/google/cloud/example_dags/example_natural_language.py +++ b/airflow/providers/google/cloud/example_dags/example_natural_language.py @@ -47,12 +47,10 @@ # [END howto_operator_gcp_natural_language_document_gcs] -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_gcp_natural_language", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1) ) as dag: # [START howto_operator_gcp_natural_language_analyze_entities] diff --git a/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py index d4df9336dd532..2e61f63ffb129 100644 --- a/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py @@ 
-26,12 +26,10 @@ FILENAME = "test_file" SQL_QUERY = "select * from test_table;" -default_args = {"start_date": days_ago(1)} - with models.DAG( dag_id='example_postgres_to_gcs', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: upload_data = PostgresToGCSOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py index ca4ae0f6413b1..3256090b04a05 100644 --- a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py @@ -44,12 +44,10 @@ def safe_name(s: str) -> str: return re.sub("[^0-9a-zA-Z_]+", "_", s) -default_args = {"start_date": days_ago(1)} - with models.DAG( dag_id="example_presto_to_gcs", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=["example"], ) as dag: diff --git a/airflow/providers/google/cloud/example_dags/example_pubsub.py b/airflow/providers/google/cloud/example_dags/example_pubsub.py index 8179b040275b7..c631e2b5054cb 100644 --- a/airflow/providers/google/cloud/example_dags/example_pubsub.py +++ b/airflow/providers/google/cloud/example_dags/example_pubsub.py @@ -35,8 +35,6 @@ TOPIC_FOR_OPERATOR_DAG = "PubSubOperatorTestTopic" MESSAGE = {"data": b"Tool", "attributes": {"name": "wrench", "mass": "1.3kg", "count": "3"}} -default_args = {"start_date": days_ago(1)} - # [START howto_operator_gcp_pubsub_pull_messages_result_cmd] echo_cmd = """ {% for m in task_instance.xcom_pull('pull_messages') %} @@ -47,8 +45,8 @@ with models.DAG( "example_gcp_pubsub_sensor", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1) ) as example_sensor_dag: # [START howto_operator_gcp_pubsub_create_topic] create_topic = PubSubCreateTopicOperator( @@ -108,8 +106,8 @@ with 
models.DAG( "example_gcp_pubsub_operator", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1) ) as example_operator_dag: # [START howto_operator_gcp_pubsub_create_topic] create_topic = PubSubCreateTopicOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py index 9c6f31529a719..218fa31fbd74b 100644 --- a/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py @@ -25,8 +25,6 @@ from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator from airflow.utils.dates import days_ago -default_args = {"start_date": days_ago(1)} - BUCKET_SRC = os.environ.get("GCP_GCS_BUCKET_1_SRC", "test-sftp-gcs") TMP_PATH = "/tmp" @@ -39,7 +37,7 @@ with models.DAG( - "example_sftp_to_gcs", default_args=default_args, schedule_interval=None + "example_sftp_to_gcs", start_date=days_ago(1), schedule_interval=None ) as dag: # [START howto_operator_sftp_to_gcs_copy_single_file] copy_file_from_sftp_to_gcs = SFTPToGCSOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py index b4ecfae8990ee..c09306c275e68 100644 --- a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py @@ -25,11 +25,9 @@ BUCKET = os.environ.get("GCP_GCS_BUCKET", "test28397yeo") SPREADSHEET_ID = os.environ.get("SPREADSHEET_ID", "1234567890qwerty") -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_sheets_to_gcs", - default_args=default_args, + start_date=days_ago(1), schedule_interval=None, # Override to match your needs tags=["example"], ) as dag: diff --git a/airflow/providers/google/cloud/example_dags/example_spanner.py 
b/airflow/providers/google/cloud/example_dags/example_spanner.py index 5eec7ad89d924..51de839e3e087 100644 --- a/airflow/providers/google/cloud/example_dags/example_spanner.py +++ b/airflow/providers/google/cloud/example_dags/example_spanner.py @@ -52,14 +52,10 @@ # OPERATION_ID should be unique per operation OPERATION_ID = 'unique_operation_id' -default_args = { - 'start_date': days_ago(1) -} - with models.DAG( 'example_gcp_spanner', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # Create diff --git a/airflow/providers/google/cloud/example_dags/example_speech_to_text.py b/airflow/providers/google/cloud/example_dags/example_speech_to_text.py index 7e2920da0515f..1340eea7b02a3 100644 --- a/airflow/providers/google/cloud/example_dags/example_speech_to_text.py +++ b/airflow/providers/google/cloud/example_dags/example_speech_to_text.py @@ -41,11 +41,9 @@ AUDIO = {"uri": "gs://{bucket}/{object}".format(bucket=BUCKET_NAME, object=FILENAME)} # [END howto_operator_speech_to_text_api_arguments] -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "example_gcp_speech_to_text", - default_args=default_args, + start_date=dates.days_ago(1), schedule_interval=None, # Override to match your needs tags=['example'], ) as dag: diff --git a/airflow/providers/google/cloud/example_dags/example_stackdriver.py b/airflow/providers/google/cloud/example_dags/example_stackdriver.py index df017ec5d52df..658abe7f86468 100644 --- a/airflow/providers/google/cloud/example_dags/example_stackdriver.py +++ b/airflow/providers/google/cloud/example_dags/example_stackdriver.py @@ -106,12 +106,10 @@ "type": "slack" } -default_args = {"start_date": days_ago(1)} - with models.DAG( 'example_stackdriver', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'] ) as dag: # [START 
howto_operator_gcp_stackdriver_upsert_notification_channel] diff --git a/airflow/providers/google/cloud/example_dags/example_tasks.py b/airflow/providers/google/cloud/example_dags/example_tasks.py index 83e1cc66b619f..6912cf138e7d4 100644 --- a/airflow/providers/google/cloud/example_dags/example_tasks.py +++ b/airflow/providers/google/cloud/example_dags/example_tasks.py @@ -35,7 +35,6 @@ ) from airflow.utils.dates import days_ago -default_args = {"start_date": days_ago(1)} timestamp = timestamp_pb2.Timestamp() timestamp.FromDatetime(datetime.now() + timedelta(hours=12)) # pylint: disable=no-member @@ -55,8 +54,8 @@ with models.DAG( "example_gcp_tasks", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: diff --git a/airflow/providers/google/cloud/example_dags/example_text_to_speech.py b/airflow/providers/google/cloud/example_dags/example_text_to_speech.py index 0ce4112ac25c7..172f0d20ab657 100644 --- a/airflow/providers/google/cloud/example_dags/example_text_to_speech.py +++ b/airflow/providers/google/cloud/example_dags/example_text_to_speech.py @@ -35,11 +35,9 @@ AUDIO_CONFIG = {"audio_encoding": "LINEAR16"} # [END howto_operator_text_to_speech_api_arguments] -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "example_gcp_text_to_speech", - default_args=default_args, + start_date=dates.days_ago(1), schedule_interval=None, # Override to match your needs tags=['example'], ) as dag: diff --git a/airflow/providers/google/cloud/example_dags/example_translate.py b/airflow/providers/google/cloud/example_dags/example_translate.py index f44940d56aaad..7c954f0ede671 100644 --- a/airflow/providers/google/cloud/example_dags/example_translate.py +++ b/airflow/providers/google/cloud/example_dags/example_translate.py @@ -27,12 +27,10 @@ from airflow.providers.google.cloud.operators.translate import CloudTranslateTextOperator from airflow.utils.dates import days_ago 
-default_args = {'start_date': days_ago(1)} - with models.DAG( 'example_gcp_translate', - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_operator_translate_text] diff --git a/airflow/providers/google/cloud/example_dags/example_translate_speech.py b/airflow/providers/google/cloud/example_dags/example_translate_speech.py index 99968a196ab37..74ce735e3d3e0 100644 --- a/airflow/providers/google/cloud/example_dags/example_translate_speech.py +++ b/airflow/providers/google/cloud/example_dags/example_translate_speech.py @@ -45,12 +45,11 @@ SOURCE_LANGUAGE = None # type: None # [END howto_operator_translate_speech_arguments] -default_args = {"start_date": dates.days_ago(1)} with models.DAG( "example_gcp_translate_speech", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1), tags=['example'], ) as dag: text_to_speech_synthesize_task = CloudTextToSpeechSynthesizeOperator( diff --git a/airflow/providers/google/cloud/example_dags/example_video_intelligence.py b/airflow/providers/google/cloud/example_dags/example_video_intelligence.py index 78b42086508ef..6098b812f04d1 100644 --- a/airflow/providers/google/cloud/example_dags/example_video_intelligence.py +++ b/airflow/providers/google/cloud/example_dags/example_video_intelligence.py @@ -36,8 +36,6 @@ ) from airflow.utils.dates import days_ago -default_args = {"start_date": days_ago(1)} - # [START howto_operator_video_intelligence_os_args] GCP_BUCKET_NAME = os.environ.get( "GCP_VIDEO_INTELLIGENCE_BUCKET_NAME", "test-bucket-name" @@ -52,8 +50,8 @@ with models.DAG( "example_gcp_video_intelligence", - default_args=default_args, schedule_interval=None, # Override to match your needs + start_date=days_ago(1), tags=['example'], ) as dag: diff --git a/airflow/providers/google/cloud/example_dags/example_vision.py 
b/airflow/providers/google/cloud/example_dags/example_vision.py index 330e1cc30be51..3d81776c34ae7 100644 --- a/airflow/providers/google/cloud/example_dags/example_vision.py +++ b/airflow/providers/google/cloud/example_dags/example_vision.py @@ -64,8 +64,6 @@ # [END howto_operator_vision_enums_import] -default_args = {'start_date': days_ago(1)} - GCP_VISION_LOCATION = os.environ.get('GCP_VISION_LOCATION', 'europe-west1') GCP_VISION_PRODUCT_SET_ID = os.environ.get('GCP_VISION_PRODUCT_SET_ID', 'product_set_explicit_id') @@ -98,7 +96,7 @@ # [END howto_operator_vision_detect_image_param] with models.DAG( - 'example_gcp_vision_autogenerated_id', default_args=default_args, schedule_interval=None + 'example_gcp_vision_autogenerated_id', start_date=days_ago(1), schedule_interval=None ) as dag_autogenerated_id: # ################################## # # ### Autogenerated IDs examples ### # @@ -236,7 +234,7 @@ remove_product_from_product_set >> product_set_delete with models.DAG( - 'example_gcp_vision_explicit_id', default_args=default_args, schedule_interval=None + 'example_gcp_vision_explicit_id', start_date=days_ago(1), schedule_interval=None ) as dag_explicit_id: # ############################# # # ### Explicit IDs examples ### # @@ -401,7 +399,7 @@ remove_product_from_product_set_2 >> product_delete_2 with models.DAG( - 'example_gcp_vision_annotate_image', default_args=default_args, schedule_interval=None + 'example_gcp_vision_annotate_image', start_date=days_ago(1), schedule_interval=None ) as dag_annotate_image: # ############################## # # ### Annotate image example ### # diff --git a/airflow/providers/google/cloud/hooks/cloud_sql.py b/airflow/providers/google/cloud/hooks/cloud_sql.py index a8c0b2066d4d5..f89e213f311b1 100644 --- a/airflow/providers/google/cloud/hooks/cloud_sql.py +++ b/airflow/providers/google/cloud/hooks/cloud_sql.py @@ -33,6 +33,7 @@ import subprocess import time import uuid +from pathlib import Path from subprocess import PIPE, Popen from 
typing import Any, Dict, List, Optional, Sequence, Union from urllib.parse import quote_plus @@ -553,13 +554,8 @@ def start_proxy(self) -> None: else: command_to_run = [self.sql_proxy_path] command_to_run.extend(self.command_line_parameters) - try: - self.log.info("Creating directory %s", - self.cloud_sql_proxy_socket_directory) - os.makedirs(self.cloud_sql_proxy_socket_directory) - except OSError: - # Needed for python 2 compatibility (exists_ok missing) - pass + self.log.info("Creating directory %s", self.cloud_sql_proxy_socket_directory) + Path(self.cloud_sql_proxy_socket_directory).mkdir(parents=True, exist_ok=True) command_to_run.extend(self._get_credential_parameters()) # pylint: disable=no-value-for-parameter self.log.info("Running the command: `%s`", " ".join(command_to_run)) self.sql_proxy_process = Popen(command_to_run, diff --git a/airflow/providers/google/cloud/hooks/datafusion.py b/airflow/providers/google/cloud/hooks/datafusion.py index 50d552c8f7122..8e19acaa03c66 100644 --- a/airflow/providers/google/cloud/hooks/datafusion.py +++ b/airflow/providers/google/cloud/hooks/datafusion.py @@ -112,13 +112,15 @@ def wait_for_pipeline_state( start_time = monotonic() current_state = None while monotonic() - start_time < timeout: - current_state = self._get_workflow_state( - pipeline_name=pipeline_name, - pipeline_id=pipeline_id, - instance_url=instance_url, - namespace=namespace, - ) - + try: + current_state = self._get_workflow_state( + pipeline_name=pipeline_name, + pipeline_id=pipeline_id, + instance_url=instance_url, + namespace=namespace, + ) + except AirflowException: + pass # Because the pipeline may not be visible in system yet if current_state in success_states: return if current_state in failure_states: diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/airflow/providers/google/cloud/hooks/dataproc.py index b42a8d92a3187..78769cf9bbda0 100644 --- a/airflow/providers/google/cloud/hooks/dataproc.py +++ 
b/airflow/providers/google/cloud/hooks/dataproc.py @@ -357,8 +357,8 @@ def diagnose_cluster( metadata: Optional[Sequence[Tuple[str, str]]] = None, ): """ - Gets cluster diagnostic information. After the operation completes, the Operation.response field - contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. After the operation completes GCS uri to + diagnose is returned :param project_id: Required. The ID of the Google Cloud Platform project that the cluster belongs to. :type project_id: str @@ -376,7 +376,7 @@ def diagnose_cluster( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_cluster_client(location=region) - result = client.diagnose_cluster( + operation = client.diagnose_cluster( project_id=project_id, region=region, cluster_name=cluster_name, @@ -384,7 +384,9 @@ def diagnose_cluster( timeout=timeout, metadata=metadata, ) - return result + operation.result() + gcs_uri = str(operation.operation.response.value) + return gcs_uri @GoogleBaseHook.fallback_to_default_project_id def get_cluster( diff --git a/airflow/providers/google/cloud/hooks/datastore.py b/airflow/providers/google/cloud/hooks/datastore.py index 0dcf7f30c2ca3..92de4bddbf1db 100644 --- a/airflow/providers/google/cloud/hooks/datastore.py +++ b/airflow/providers/google/cloud/hooks/datastore.py @@ -100,7 +100,7 @@ def allocate_ids(self, partial_keys: List, project_id: str) -> List: return resp['keys'] @GoogleBaseHook.fallback_to_default_project_id - def begin_transaction(self, project_id: str) -> str: + def begin_transaction(self, project_id: str, transaction_options: Dict[str, Any]) -> str: """ Begins a new transaction. @@ -109,6 +109,8 @@ def begin_transaction(self, project_id: str) -> str: :param project_id: Google Cloud Platform project ID against which to make the request. :type project_id: str + :param transaction_options: Options for a new transaction. + :type transaction_options: Dict[str, Any] :return: a transaction handle. 
:rtype: str """ @@ -116,7 +118,7 @@ def begin_transaction(self, project_id: str) -> str: resp = (conn # pylint: disable=no-member .projects() - .beginTransaction(projectId=project_id, body={}) + .beginTransaction(projectId=project_id, body={"transactionOptions": transaction_options}) .execute(num_retries=self.num_retries)) return resp['transaction'] diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/airflow/providers/google/cloud/hooks/pubsub.py index 026a116a6b531..e595a8764ac29 100644 --- a/airflow/providers/google/cloud/hooks/pubsub.py +++ b/airflow/providers/google/cloud/hooks/pubsub.py @@ -28,7 +28,10 @@ from google.api_core.retry import Retry from google.cloud.exceptions import NotFound from google.cloud.pubsub_v1 import PublisherClient, SubscriberClient -from google.cloud.pubsub_v1.types import Duration, MessageStoragePolicy, PushConfig, ReceivedMessage +from google.cloud.pubsub_v1.types import ( + DeadLetterPolicy, Duration, ExpirationPolicy, MessageStoragePolicy, PushConfig, ReceivedMessage, + RetryPolicy, +) from googleapiclient.errors import HttpError from airflow.providers.google.common.hooks.base_google import GoogleBaseHook @@ -300,6 +303,11 @@ def create_subscription( retain_acked_messages: Optional[bool] = None, message_retention_duration: Optional[Union[Dict, Duration]] = None, labels: Optional[Dict[str, str]] = None, + enable_message_ordering: bool = False, + expiration_policy: Optional[Union[Dict, ExpirationPolicy]] = None, + filter_: Optional[str] = None, + dead_letter_policy: Optional[Union[Dict, DeadLetterPolicy]] = None, + retry_policy: Optional[Union[Dict, RetryPolicy]] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, @@ -345,6 +353,32 @@ def create_subscription( :param labels: Client-assigned labels; see https://cloud.google.com/pubsub/docs/labels :type labels: Dict[str, str] + :param enable_message_ordering: If true, messages published with the same + 
ordering_key in PubsubMessage will be delivered to the subscribers in the order + in which they are received by the Pub/Sub system. Otherwise, they may be + delivered in any order. + :type enable_message_ordering: bool + :param expiration_policy: A policy that specifies the conditions for this + subscription’s expiration. A subscription is considered active as long as any + connected subscriber is successfully consuming messages from the subscription or + is issuing operations on the subscription. If expiration_policy is not set, + a default policy with ttl of 31 days will be used. The minimum allowed value for + expiration_policy.ttl is 1 day. + :type expiration_policy: Union[Dict, google.cloud.pubsub_v1.types.ExpirationPolicy`] + :param filter_: An expression written in the Cloud Pub/Sub filter language. If + non-empty, then only PubsubMessages whose attributes field matches the filter are + delivered on this subscription. If empty, then no messages are filtered out. + :type filter_: str + :param dead_letter_policy: A policy that specifies the conditions for dead lettering + messages in this subscription. If dead_letter_policy is not set, dead lettering is + disabled. + :type dead_letter_policy: Union[Dict, google.cloud.pubsub_v1.types.DeadLetterPolicy] + :param retry_policy: A policy that specifies how Pub/Sub retries message delivery + for this subscription. If not set, the default retry policy is applied. This + generally implies that messages will be retried as soon as possible for healthy + subscribers. RetryPolicy will be triggered on NACKs or acknowledgement deadline + exceeded events for a given message. + :type retry_policy: Union[Dict, google.cloud.pubsub_v1.types.RetryPolicy] :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. 
:type retry: google.api_core.retry.Retry @@ -383,6 +417,11 @@ def create_subscription( retain_acked_messages=retain_acked_messages, message_retention_duration=message_retention_duration, labels=labels, + enable_message_ordering=enable_message_ordering, + expiration_policy=expiration_policy, + filter_=filter_, + dead_letter_policy=dead_letter_policy, + retry_policy=retry_policy, retry=retry, timeout=timeout, metadata=metadata, diff --git a/airflow/providers/google/cloud/operators/automl.py b/airflow/providers/google/cloud/operators/automl.py index 49d4c4c5cf7c9..aafa34ce2d8cf 100644 --- a/airflow/providers/google/cloud/operators/automl.py +++ b/airflow/providers/google/cloud/operators/automl.py @@ -64,7 +64,7 @@ class AutoMLTrainModelOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, model: dict, location: str, project_id: Optional[str] = None, @@ -72,10 +72,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.model = model self.location = location @@ -139,7 +138,7 @@ class AutoMLPredictOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, model_id: str, location: str, payload: dict, @@ -149,10 +148,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.model_id = model_id self.params = params # type: ignore @@ -232,7 +230,7 @@ class AutoMLBatchPredictOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, model_id: str, input_config: dict, output_config: dict, @@ -243,10 +241,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", 
- *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.model_id = model_id self.location = location @@ -312,7 +309,7 @@ class AutoMLCreateDatasetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset: dict, location: str, project_id: Optional[str] = None, @@ -320,10 +317,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dataset = dataset self.location = location @@ -388,7 +384,7 @@ class AutoMLImportDataOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_id: str, location: str, input_config: dict, @@ -397,10 +393,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dataset_id = dataset_id self.input_config = input_config @@ -479,7 +474,7 @@ class AutoMLTablesListColumnSpecsOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, dataset_id: str, table_spec_id: str, location: str, @@ -491,10 +486,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dataset_id = dataset_id self.table_spec_id = table_spec_id self.field_mask = field_mask @@ -562,7 +556,7 @@ class AutoMLTablesUpdateDatasetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset: dict, location: str, update_mask: Optional[dict] = None, @@ -570,10 +564,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, 
**kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dataset = dataset self.update_mask = update_mask @@ -630,7 +623,7 @@ class AutoMLGetModelOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, model_id: str, location: str, project_id: Optional[str] = None, @@ -638,10 +631,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.model_id = model_id self.location = location @@ -697,7 +689,7 @@ class AutoMLDeleteModelOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, model_id: str, location: str, project_id: Optional[str] = None, @@ -705,10 +697,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.model_id = model_id self.location = location @@ -774,7 +765,7 @@ class AutoMLDeployModelOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, model_id: str, location: str, project_id: Optional[str] = None, @@ -783,10 +774,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.model_id = model_id self.image_detection_metadata = image_detection_metadata @@ -854,7 +844,7 @@ class AutoMLTablesListTableSpecsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_id: str, location: str, page_size: Optional[int] = None, @@ -864,10 +854,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) 
self.dataset_id = dataset_id self.filter_ = filter_ self.page_size = page_size @@ -926,17 +915,16 @@ class AutoMLListDatasetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, project_id: Optional[str] = None, metadata: Optional[MetaData] = None, timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.project_id = project_id self.metadata = metadata @@ -997,7 +985,7 @@ class AutoMLDeleteDatasetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_id: Union[str, List[str]], location: str, project_id: Optional[str] = None, @@ -1005,10 +993,9 @@ def __init__( timeout: Optional[float] = None, retry: Optional[Retry] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dataset_id = dataset_id self.location = location diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py index 47af082486892..ff3b14e2607a2 100644 --- a/airflow/providers/google/cloud/operators/bigquery.py +++ b/airflow/providers/google/cloud/operators/bigquery.py @@ -141,16 +141,15 @@ class BigQueryCheckOperator(CheckOperator): @apply_defaults def __init__( - self, + self, *, sql: str, gcp_conn_id: str = 'google_cloud_default', bigquery_conn_id: Optional[str] = None, use_legacy_sql: bool = True, location: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(sql=sql, *args, **kwargs) + super().__init__(sql=sql, **kwargs) if bigquery_conn_id: warnings.warn(_DEPRECATION_MSG, DeprecationWarning, stacklevel=3) gcp_conn_id = bigquery_conn_id @@ -197,7 +196,7 @@ class BigQueryValueCheckOperator(ValueCheckOperator): @apply_defaults def __init__( - self, + self, *, sql: str, pass_value: Any, tolerance: 
Any = None, @@ -205,14 +204,13 @@ def __init__( bigquery_conn_id: Optional[str] = None, use_legacy_sql: bool = True, location: Optional[str] = None, - *args, **kwargs, ) -> None: super().__init__( sql=sql, pass_value=pass_value, tolerance=tolerance, - *args, **kwargs + **kwargs ) if bigquery_conn_id: @@ -272,7 +270,7 @@ class BigQueryIntervalCheckOperator(IntervalCheckOperator): @apply_defaults def __init__( - self, + self, *, table: str, metrics_thresholds: dict, date_filter_column: str = 'ds', @@ -281,7 +279,6 @@ def __init__( bigquery_conn_id: Optional[str] = None, use_legacy_sql: bool = True, location: Optional[str] = None, - *args, **kwargs, ) -> None: super().__init__( @@ -289,7 +286,7 @@ def __init__( metrics_thresholds=metrics_thresholds, date_filter_column=date_filter_column, days_back=days_back, - *args, **kwargs + **kwargs ) if bigquery_conn_id: @@ -335,7 +332,7 @@ class BigQueryGetDataOperator(BaseOperator): task_id='get_data_from_bq', dataset_id='test_dataset', table_id='Transaction_partitions', - max_results='100', + max_results=100, selected_fields='DATE', gcp_conn_id='airflow-conn-id' ) @@ -367,7 +364,7 @@ class BigQueryGetDataOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_id: str, table_id: str, max_results: int = 100, @@ -376,10 +373,9 @@ def __init__( bigquery_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, location: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if bigquery_conn_id: warnings.warn( @@ -532,6 +528,7 @@ def operator_extra_links(self): # pylint: disable=too-many-arguments, too-many-locals @apply_defaults def __init__(self, + *, sql: Union[str, Iterable], destination_dataset_table: Optional[str] = None, write_disposition: Optional[str] = 'WRITE_EMPTY', @@ -554,9 +551,8 @@ def __init__(self, cluster_fields: Optional[List[str]] = None, location: Optional[str] = None, encryption_configuration: Optional[dict] = None, - 
*args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if bigquery_conn_id: warnings.warn( @@ -788,7 +784,7 @@ class BigQueryCreateEmptyTableOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, + self, *, dataset_id: str, table_id: str, table_resource: Optional[Dict[str, Any]] = None, @@ -804,9 +800,9 @@ def __init__( encryption_configuration: Optional[Dict] = None, location: Optional[str] = None, cluster_fields: Optional[List[str]] = None, - *args, **kwargs + **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.dataset_id = dataset_id @@ -967,7 +963,7 @@ class BigQueryCreateExternalTableOperator(BaseOperator): # pylint: disable=too-many-arguments,too-many-locals @apply_defaults def __init__( - self, + self, *, bucket: str, source_objects: List, destination_project_dataset_table: str, @@ -989,9 +985,9 @@ def __init__( labels: Optional[Dict] = None, encryption_configuration: Optional[Dict] = None, location: Optional[str] = None, - *args, **kwargs + **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) # GCS config self.bucket = bucket @@ -1135,14 +1131,14 @@ class BigQueryDeleteDatasetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_id: str, project_id: Optional[str] = None, delete_contents: bool = False, gcp_conn_id: str = 'google_cloud_default', bigquery_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ) -> None: if bigquery_conn_id: warnings.warn( @@ -1156,7 +1152,7 @@ def __init__( self.gcp_conn_id = gcp_conn_id self.delegate_to = delegate_to - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): self.log.info('Dataset id: %s Project id: %s', self.dataset_id, self.project_id) @@ -1215,6 +1211,7 @@ class BigQueryCreateEmptyDatasetOperator(BaseOperator): @apply_defaults def 
__init__(self, + *, dataset_id: Optional[str] = None, project_id: Optional[str] = None, dataset_reference: Optional[Dict] = None, @@ -1222,7 +1219,7 @@ def __init__(self, gcp_conn_id: str = 'google_cloud_default', bigquery_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs) -> None: + **kwargs) -> None: if bigquery_conn_id: warnings.warn( @@ -1237,7 +1234,7 @@ def __init__(self, self.dataset_reference = dataset_reference if dataset_reference else {} self.delegate_to = delegate_to - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): bq_hook = BigQueryHook( @@ -1284,16 +1281,17 @@ class BigQueryGetDatasetOperator(BaseOperator): @apply_defaults def __init__(self, + *, dataset_id: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: + **kwargs) -> None: self.dataset_id = dataset_id self.project_id = project_id self.gcp_conn_id = gcp_conn_id self.delegate_to = delegate_to - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): bq_hook = BigQueryHook(gcp_conn_id=self.gcp_conn_id, @@ -1334,20 +1332,20 @@ class BigQueryGetDatasetTablesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_id: str, project_id: Optional[str] = None, max_results: Optional[int] = None, gcp_conn_id: Optional[str] = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ) -> None: self.dataset_id = dataset_id self.project_id = project_id self.max_results = max_results self.gcp_conn_id = gcp_conn_id self.delegate_to = delegate_to - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): bq_hook = BigQueryHook( @@ -1391,13 +1389,13 @@ class BigQueryPatchDatasetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_id: str, dataset_resource: dict, project_id: Optional[str] = None, 
gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs, + **kwargs, ) -> None: warnings.warn( @@ -1409,7 +1407,7 @@ def __init__( self.gcp_conn_id = gcp_conn_id self.dataset_resource = dataset_resource self.delegate_to = delegate_to - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): bq_hook = BigQueryHook( @@ -1458,14 +1456,14 @@ class BigQueryUpdateDatasetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_resource: dict, fields: Optional[List[str]] = None, dataset_id: Optional[str] = None, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ) -> None: self.dataset_id = dataset_id self.project_id = project_id @@ -1473,7 +1471,7 @@ def __init__( self.gcp_conn_id = gcp_conn_id self.dataset_resource = dataset_resource self.delegate_to = delegate_to - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): bq_hook = BigQueryHook( @@ -1523,17 +1521,16 @@ class BigQueryDeleteTableOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, deletion_dataset_table: str, gcp_conn_id: str = 'google_cloud_default', bigquery_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, ignore_if_missing: bool = False, location: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if bigquery_conn_id: warnings.warn( @@ -1595,7 +1592,7 @@ class BigQueryUpsertTableOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dataset_id: str, table_resource: dict, project_id: Optional[str] = None, @@ -1603,10 +1600,9 @@ def __init__( bigquery_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, location: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if bigquery_conn_id: 
warnings.warn( @@ -1675,10 +1671,9 @@ def __init__( job_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.configuration = configuration self.location = location self.job_id = job_id diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py b/airflow/providers/google/cloud/operators/bigquery_dts.py index 0c904500a93aa..3acfc48e21dbf 100644 --- a/airflow/providers/google/cloud/operators/bigquery_dts.py +++ b/airflow/providers/google/cloud/operators/bigquery_dts.py @@ -66,7 +66,7 @@ class BigQueryCreateDataTransferOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, transfer_config: dict, project_id: Optional[str] = None, authorization_code: Optional[str] = None, @@ -74,10 +74,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id="google_cloud_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.transfer_config = transfer_config self.authorization_code = authorization_code self.project_id = project_id @@ -133,17 +132,16 @@ class BigQueryDeleteDataTransferConfigOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, transfer_config_id: str, project_id: Optional[str] = None, retry: Retry = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id="google_cloud_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.transfer_config_id = transfer_config_id self.retry = retry @@ -210,7 +208,7 @@ class BigQueryDataTransferServiceStartTransferRunsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, transfer_config_id: str, project_id: Optional[str] = None, requested_time_range: Optional[dict] = None, @@ -219,10 +217,9 @@ def 
__init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id="google_cloud_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.transfer_config_id = transfer_config_id self.requested_time_range = requested_time_range diff --git a/airflow/providers/google/cloud/operators/bigtable.py b/airflow/providers/google/cloud/operators/bigtable.py index a4ed5d92b8edf..7098fb1993f7b 100644 --- a/airflow/providers/google/cloud/operators/bigtable.py +++ b/airflow/providers/google/cloud/operators/bigtable.py @@ -94,7 +94,7 @@ class BigtableCreateInstanceOperator(BaseOperator, BigtableValidationMixin): template_fields: Iterable[str] = ['project_id', 'instance_id', 'main_cluster_id', 'main_cluster_zone'] @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments + def __init__(self, *, # pylint: disable=too-many-arguments instance_id: str, main_cluster_id: str, main_cluster_zone: str, @@ -108,7 +108,7 @@ def __init__(self, # pylint: disable=too-many-arguments cluster_storage_type: Optional[enums.StorageType] = None, timeout: Optional[float] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.project_id = project_id self.instance_id = instance_id self.main_cluster_id = main_cluster_id @@ -123,7 +123,7 @@ def __init__(self, # pylint: disable=too-many-arguments self.timeout = timeout self._validate_inputs() self.gcp_conn_id = gcp_conn_id - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): hook = BigtableHook(gcp_conn_id=self.gcp_conn_id) @@ -181,16 +181,16 @@ class BigtableDeleteInstanceOperator(BaseOperator, BigtableValidationMixin): template_fields = ['project_id', 'instance_id'] # type: Iterable[str] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', 
- *args, **kwargs) -> None: + **kwargs) -> None: self.project_id = project_id self.instance_id = instance_id self._validate_inputs() self.gcp_conn_id = gcp_conn_id - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): hook = BigtableHook(gcp_conn_id=self.gcp_conn_id) @@ -241,14 +241,14 @@ class BigtableCreateTableOperator(BaseOperator, BigtableValidationMixin): template_fields = ['project_id', 'instance_id', 'table_id'] # type: Iterable[str] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, table_id: str, project_id: Optional[str] = None, initial_split_keys: Optional[List] = None, column_families: Optional[Dict[str, GarbageCollectionRule]] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.project_id = project_id self.instance_id = instance_id self.table_id = table_id @@ -256,7 +256,7 @@ def __init__(self, self.column_families = column_families or {} self._validate_inputs() self.gcp_conn_id = gcp_conn_id - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _compare_column_families(self, hook, instance): table_column_families = hook.get_column_families_for_table(instance, self.table_id) @@ -331,20 +331,20 @@ class BigtableDeleteTableOperator(BaseOperator, BigtableValidationMixin): template_fields = ['project_id', 'instance_id', 'table_id'] # type: Iterable[str] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, table_id: str, project_id: Optional[str] = None, app_profile_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.project_id = project_id self.instance_id = instance_id self.table_id = table_id self.app_profile_id = app_profile_id self._validate_inputs() self.gcp_conn_id = gcp_conn_id - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): hook = BigtableHook(gcp_conn_id=self.gcp_conn_id) @@ 
-396,20 +396,20 @@ class BigtableUpdateClusterOperator(BaseOperator, BigtableValidationMixin): template_fields = ['project_id', 'instance_id', 'cluster_id', 'nodes'] # type: Iterable[str] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, cluster_id: str, nodes: int, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.project_id = project_id self.instance_id = instance_id self.cluster_id = cluster_id self.nodes = nodes self._validate_inputs() self.gcp_conn_id = gcp_conn_id - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): hook = BigtableHook(gcp_conn_id=self.gcp_conn_id) diff --git a/airflow/providers/google/cloud/operators/cloud_build.py b/airflow/providers/google/cloud/operators/cloud_build.py index fad1f0672e569..9d1e86a403d8c 100644 --- a/airflow/providers/google/cloud/operators/cloud_build.py +++ b/airflow/providers/google/cloud/operators/cloud_build.py @@ -184,13 +184,13 @@ class CloudBuildCreateBuildOperator(BaseOperator): template_ext = ['.yml', '.yaml', '.json'] @apply_defaults - def __init__(self, + def __init__(self, *, body: Union[dict, str], project_id: Optional[str] = None, gcp_conn_id: str = "google_cloud_default", api_version: str = "v1", - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.body = body # Not template fields to keep original value self.body_raw = body diff --git a/airflow/providers/google/cloud/operators/cloud_memorystore.py b/airflow/providers/google/cloud/operators/cloud_memorystore.py index e4614b472d167..ebd498a03467e 100644 --- a/airflow/providers/google/cloud/operators/cloud_memorystore.py +++ b/airflow/providers/google/cloud/operators/cloud_memorystore.py @@ -83,7 +83,7 @@ class CloudMemorystoreCreateInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, instance_id: str, 
instance: Union[Dict, Instance], @@ -92,10 +92,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.instance_id = instance_id self.instance = instance @@ -150,7 +149,7 @@ class CloudMemorystoreDeleteInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, instance: str, project_id: Optional[str] = None, @@ -158,10 +157,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.instance = instance self.project_id = project_id @@ -228,7 +226,7 @@ class CloudMemorystoreExportInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, instance: str, output_config: Union[Dict, OutputConfig], @@ -237,10 +235,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.instance = instance self.output_config = output_config @@ -307,7 +304,7 @@ class CloudMemorystoreFailoverInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, instance: str, data_protection_mode: FailoverInstanceRequest.DataProtectionMode, @@ -316,10 +313,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.instance = instance 
self.data_protection_mode = data_protection_mode @@ -372,7 +368,7 @@ class CloudMemorystoreGetInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, instance: str, project_id: Optional[str] = None, @@ -380,10 +376,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.instance = instance self.project_id = project_id @@ -452,7 +447,7 @@ class CloudMemorystoreImportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, instance: str, input_config: Union[Dict, InputConfig], @@ -461,10 +456,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.instance = instance self.input_config = input_config @@ -521,7 +515,7 @@ class CloudMemorystoreListInstancesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, page_size: int, project_id: Optional[str] = None, @@ -529,10 +523,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.page_size = page_size self.project_id = project_id @@ -613,7 +606,7 @@ class CloudMemorystoreUpdateInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, update_mask: Union[Dict, FieldMask], instance: Union[Dict, Instance], location: Optional[str] = None, @@ -623,10 +616,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, 
gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.update_mask = update_mask self.instance = instance self.location = location @@ -693,7 +685,7 @@ class CloudMemorystoreScaleInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, memory_size_gb: int, location: Optional[str] = None, instance_id: Optional[str] = None, @@ -702,10 +694,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.memory_size_gb = memory_size_gb self.location = location self.instance_id = instance_id @@ -792,7 +783,7 @@ class CloudMemorystoreCreateInstanceAndImportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, instance_id: str, instance: Union[Dict, Instance], @@ -802,10 +793,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.instance_id = instance_id self.instance = instance @@ -887,7 +877,7 @@ class CloudMemorystoreExportAndDeleteInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, instance: str, output_config: Union[Dict, OutputConfig], @@ -896,10 +886,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.instance = instance self.output_config = output_config diff --git a/airflow/providers/google/cloud/operators/cloud_sql.py 
b/airflow/providers/google/cloud/operators/cloud_sql.py index 1ff487e8c7264..c75575683bb9d 100644 --- a/airflow/providers/google/cloud/operators/cloud_sql.py +++ b/airflow/providers/google/cloud/operators/cloud_sql.py @@ -154,7 +154,7 @@ class CloudSQLBaseOperator(BaseOperator): :type api_version: str """ @apply_defaults - def __init__(self, + def __init__(self, *, instance: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', @@ -236,7 +236,7 @@ class CloudSQLCreateInstanceOperator(CloudSQLBaseOperator): # [END gcp_sql_create_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, body: dict, instance: str, project_id: Optional[str] = None, @@ -317,7 +317,7 @@ class CloudSQLInstancePatchOperator(CloudSQLBaseOperator): # [END gcp_sql_patch_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, body: dict, instance: str, project_id: Optional[str] = None, @@ -373,7 +373,7 @@ class CloudSQLDeleteInstanceOperator(CloudSQLBaseOperator): # [END gcp_sql_delete_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', @@ -426,7 +426,7 @@ class CloudSQLCreateInstanceDatabaseOperator(CloudSQLBaseOperator): # [END gcp_sql_db_create_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance: str, body: dict, project_id: Optional[str] = None, @@ -505,7 +505,7 @@ class CloudSQLPatchInstanceDatabaseOperator(CloudSQLBaseOperator): # [END gcp_sql_db_patch_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance: str, database: str, body: dict, @@ -578,7 +578,7 @@ class CloudSQLDeleteInstanceDatabaseOperator(CloudSQLBaseOperator): # [END gcp_sql_db_delete_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance: str, database: str, project_id: Optional[str] = None, @@ -644,7 +644,7 @@ class 
CloudSQLExportInstanceOperator(CloudSQLBaseOperator): # [END gcp_sql_export_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance: str, body: dict, project_id: Optional[str] = None, @@ -724,7 +724,7 @@ class CloudSQLImportInstanceOperator(CloudSQLBaseOperator): # [END gcp_sql_import_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance: str, body: dict, project_id: Optional[str] = None, @@ -795,7 +795,7 @@ class CloudSQLExecuteQueryOperator(BaseOperator): # [END gcp_sql_query_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, sql: Union[List[str], str], autocommit: bool = False, parameters: Optional[Union[Dict, Iterable]] = None, diff --git a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py index 96bbc3b995f28..f2ad3199f9907 100644 --- a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py @@ -197,15 +197,14 @@ class CloudDataTransferServiceCreateJobOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, body: dict, aws_conn_id: str = 'aws_default', gcp_conn_id: str = 'google_cloud_default', api_version: str = 'v1', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.body = deepcopy(body) self.aws_conn_id = aws_conn_id self.gcp_conn_id = gcp_conn_id @@ -256,16 +255,15 @@ class CloudDataTransferServiceUpdateJobOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, job_name: str, body: dict, aws_conn_id: str = 'aws_default', gcp_conn_id: str = 'google_cloud_default', api_version: str = 'v1', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_name = job_name self.body = body self.gcp_conn_id = gcp_conn_id @@ -313,15 +311,14 @@ class 
CloudDataTransferServiceDeleteJobOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, job_name: str, gcp_conn_id: str = "google_cloud_default", api_version: str = "v1", project_id: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_name = job_name self.project_id = project_id self.gcp_conn_id = gcp_conn_id @@ -361,14 +358,13 @@ class CloudDataTransferServiceGetOperationOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, operation_name: str, gcp_conn_id: str = "google_cloud_default", api_version: str = "v1", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.operation_name = operation_name self.gcp_conn_id = gcp_conn_id self.api_version = api_version @@ -410,7 +406,6 @@ def __init__(self, request_filter: Optional[Dict] = None, gcp_conn_id: str = 'google_cloud_default', api_version: str = 'v1', - *args, **kwargs) -> None: # To preserve backward compatibility # TODO: remove one day @@ -421,7 +416,7 @@ def __init__(self, else: TypeError("__init__() missing 1 required positional argument: 'request_filter'") - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.filter = request_filter self.gcp_conn_id = gcp_conn_id self.api_version = api_version @@ -459,14 +454,13 @@ class CloudDataTransferServicePauseOperationOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, operation_name: str, gcp_conn_id: str = "google_cloud_default", api_version: str = "v1", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.operation_name = operation_name self.gcp_conn_id = gcp_conn_id self.api_version = api_version @@ -502,18 +496,17 @@ class CloudDataTransferServiceResumeOperationOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, operation_name: str, gcp_conn_id: str = "google_cloud_default", api_version: str = "v1", - *args, 
**kwargs ) -> None: self.operation_name = operation_name self.gcp_conn_id = gcp_conn_id self.api_version = api_version self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if not self.operation_name: @@ -546,14 +539,13 @@ class CloudDataTransferServiceCancelOperationOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, operation_name: str, gcp_conn_id: str = "google_cloud_default", api_version: str = "v1", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.operation_name = operation_name self.api_version = api_version self.gcp_conn_id = gcp_conn_id @@ -635,7 +627,7 @@ class CloudDataTransferServiceS3ToGCSOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, s3_bucket: str, gcs_bucket: str, project_id: Optional[str] = None, @@ -648,11 +640,10 @@ def __init__( # pylint: disable=too-many-arguments transfer_options: Optional[Dict] = None, wait: bool = True, timeout: Optional[float] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.s3_bucket = s3_bucket self.gcs_bucket = gcs_bucket self.project_id = project_id @@ -778,7 +769,7 @@ class CloudDataTransferServiceGCSToGCSOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, source_bucket: str, destination_bucket: str, project_id: Optional[str] = None, @@ -790,11 +781,10 @@ def __init__( # pylint: disable=too-many-arguments transfer_options: Optional[Dict] = None, wait: bool = True, timeout: Optional[float] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.source_bucket = source_bucket self.destination_bucket = destination_bucket self.project_id = project_id diff --git a/airflow/providers/google/cloud/operators/compute.py b/airflow/providers/google/cloud/operators/compute.py 
index 9b239ab278604..33a247268991d 100644 --- a/airflow/providers/google/cloud/operators/compute.py +++ b/airflow/providers/google/cloud/operators/compute.py @@ -39,7 +39,7 @@ class ComputeEngineBaseOperator(BaseOperator): """ @apply_defaults - def __init__(self, + def __init__(self, *, zone: str, resource_id: str, project_id: Optional[str] = None, @@ -96,7 +96,7 @@ class ComputeEngineStartInstanceOperator(ComputeEngineBaseOperator): # [END gce_instance_start_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, zone: str, resource_id: str, project_id: Optional[str] = None, @@ -144,7 +144,7 @@ class ComputeEngineStopInstanceOperator(ComputeEngineBaseOperator): # [END gce_instance_stop_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, zone: str, resource_id: str, project_id: Optional[str] = None, @@ -202,7 +202,7 @@ class ComputeEngineSetMachineTypeOperator(ComputeEngineBaseOperator): # [END gce_instance_set_machine_type_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, zone: str, resource_id: str, body: dict, @@ -324,7 +324,7 @@ class ComputeEngineCopyInstanceTemplateOperator(ComputeEngineBaseOperator): # [END gce_instance_template_copy_operator_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, resource_id: str, body_patch: dict, project_id: Optional[str] = None, @@ -434,7 +434,7 @@ class ComputeEngineInstanceGroupUpdateManagerTemplateOperator(ComputeEngineBaseO # [END gce_igm_update_template_operator_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, resource_id: str, zone: str, source_template: str, diff --git a/airflow/providers/google/cloud/operators/datacatalog.py b/airflow/providers/google/cloud/operators/datacatalog.py index 5c3e0644939f9..84f81120167b7 100644 --- a/airflow/providers/google/cloud/operators/datacatalog.py +++ b/airflow/providers/google/cloud/operators/datacatalog.py @@ -82,7 +82,7 @@ class 
CloudDataCatalogCreateEntryOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, entry_group: str, entry_id: str, @@ -92,10 +92,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group = entry_group self.entry_id = entry_id @@ -186,7 +185,7 @@ class CloudDataCatalogCreateEntryGroupOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, entry_group_id: str, entry_group: Union[Dict, EntryGroup], @@ -195,10 +194,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group_id = entry_group_id self.entry_group = entry_group @@ -290,7 +288,7 @@ class CloudDataCatalogCreateTagOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, location: str, entry_group: str, entry: str, @@ -301,10 +299,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group = entry_group self.entry = entry @@ -407,7 +404,7 @@ class CloudDataCatalogCreateTagTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, tag_template_id: str, tag_template: Union[Dict, TagTemplate], @@ -416,10 +413,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - 
*args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.tag_template_id = tag_template_id self.tag_template = tag_template @@ -511,7 +507,7 @@ class CloudDataCatalogCreateTagTemplateFieldOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, tag_template: str, tag_template_field_id: str, @@ -521,10 +517,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.tag_template = tag_template self.tag_template_field_id = tag_template_field_id @@ -608,7 +603,7 @@ class CloudDataCatalogDeleteEntryOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, entry_group: str, entry: str, @@ -617,10 +612,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group = entry_group self.entry = entry @@ -680,7 +674,7 @@ class CloudDataCatalogDeleteEntryGroupOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, entry_group: str, project_id: Optional[str] = None, @@ -688,10 +682,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group = entry_group self.project_id = project_id @@ -761,7 +754,7 @@ class CloudDataCatalogDeleteTagOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, entry_group: str, entry: str, @@ -771,10 +764,9 @@ def 
__init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group = entry_group self.entry = entry @@ -847,7 +839,7 @@ class CloudDataCatalogDeleteTagTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, tag_template: str, force: bool, @@ -856,10 +848,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.tag_template = tag_template self.force = force @@ -931,7 +922,7 @@ class CloudDataCatalogDeleteTagTemplateFieldOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, tag_template: str, field: str, @@ -941,10 +932,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.tag_template = tag_template self.field = field @@ -1015,7 +1005,7 @@ class CloudDataCatalogGetEntryOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, entry_group: str, entry: str, @@ -1024,10 +1014,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group = entry_group self.entry = entry @@ -1097,7 +1086,7 @@ class CloudDataCatalogGetEntryGroupOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, 
entry_group: str, read_mask: Union[Dict, FieldMask], @@ -1106,10 +1095,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group = entry_group self.read_mask = read_mask @@ -1173,7 +1161,7 @@ class CloudDataCatalogGetTagTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, tag_template: str, project_id: Optional[str] = None, @@ -1181,10 +1169,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.tag_template = tag_template self.project_id = project_id @@ -1254,7 +1241,7 @@ class CloudDataCatalogListTagsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, entry_group: str, entry: str, @@ -1264,10 +1251,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.entry_group = entry_group self.entry = entry @@ -1335,7 +1321,7 @@ class CloudDataCatalogLookupEntryOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, linked_resource: Optional[str] = None, sql_resource: Optional[str] = None, project_id: Optional[str] = None, @@ -1343,10 +1329,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.linked_resource = linked_resource 
self.sql_resource = sql_resource self.project_id = project_id @@ -1415,7 +1400,7 @@ class CloudDataCatalogRenameTagTemplateFieldOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, tag_template: str, field: str, @@ -1425,10 +1410,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.tag_template = tag_template self.field = field @@ -1523,7 +1507,7 @@ class CloudDataCatalogSearchCatalogOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, scope: Union[Dict, SearchCatalogRequest.Scope], query: str, page_size: int = 100, @@ -1532,10 +1516,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.scope = scope self.query = query self.page_size = page_size @@ -1615,7 +1598,7 @@ class CloudDataCatalogUpdateEntryOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, entry: Union[Dict, Entry], update_mask: Union[Dict, FieldMask], location: Optional[str] = None, @@ -1626,10 +1609,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.entry = entry self.update_mask = update_mask self.location = location @@ -1713,7 +1695,7 @@ class CloudDataCatalogUpdateTagOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, tag: Union[Dict, Tag], update_mask: Union[Dict, FieldMask], location: 
Optional[str] = None, @@ -1725,10 +1707,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.tag = tag self.update_mask = update_mask self.location = location @@ -1815,7 +1796,7 @@ class CloudDataCatalogUpdateTagTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, tag_template: Union[Dict, TagTemplate], update_mask: Union[Dict, FieldMask], location: Optional[str] = None, @@ -1825,10 +1806,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.tag_template = tag_template self.update_mask = update_mask self.location = location @@ -1919,7 +1899,7 @@ class CloudDataCatalogUpdateTagTemplateFieldOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, tag_template_field: Union[Dict, TagTemplateField], update_mask: Union[Dict, FieldMask], tag_template_field_name: Optional[str] = None, @@ -1931,10 +1911,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.tag_template_field_name = tag_template_field_name self.location = location self.tag_template = tag_template diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py index 04bf2979bc3ff..85471e3c71941 100644 --- a/airflow/providers/google/cloud/operators/dataflow.py +++ b/airflow/providers/google/cloud/operators/dataflow.py @@ -179,23 
+179,21 @@ class DataflowCreateJavaJobOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__( - self, - jar: str, - job_name: str = '{{task.task_id}}', - dataflow_default_options: Optional[dict] = None, - options: Optional[dict] = None, - project_id: Optional[str] = None, - location: str = DEFAULT_DATAFLOW_LOCATION, - gcp_conn_id: str = 'google_cloud_default', - delegate_to: Optional[str] = None, - poll_sleep: int = 10, - job_class: Optional[str] = None, - check_if_running: CheckJobRunning = CheckJobRunning.WaitForRun, - multiple_jobs: Optional[bool] = None, - *args, - **kwargs) -> None: - super().__init__(*args, **kwargs) + def __init__(self, *, + jar: str, + job_name: str = '{{task.task_id}}', + dataflow_default_options: Optional[dict] = None, + options: Optional[dict] = None, + project_id: Optional[str] = None, + location: str = DEFAULT_DATAFLOW_LOCATION, + gcp_conn_id: str = 'google_cloud_default', + delegate_to: Optional[str] = None, + poll_sleep: int = 10, + job_class: Optional[str] = None, + check_if_running: CheckJobRunning = CheckJobRunning.WaitForRun, + multiple_jobs: Optional[bool] = None, + **kwargs) -> None: + super().__init__(**kwargs) dataflow_default_options = dataflow_default_options or {} options = options or {} @@ -365,7 +363,7 @@ class DataflowTemplatedJobStartOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, template: str, job_name: str = '{{task.task_id}}', options: Optional[Dict[str, Any]] = None, @@ -376,9 +374,8 @@ def __init__( # pylint: disable=too-many-arguments gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, poll_sleep: int = 10, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template = template self.job_name = job_name self.options = options or {} @@ -494,7 +491,7 @@ class DataflowCreatePythonJobOperator(BaseOperator): @apply_defaults def __init__( # pylint: 
disable=too-many-arguments - self, + self, *, py_file: str, job_name: str = '{{task.task_id}}', dataflow_default_options: Optional[dict] = None, @@ -508,10 +505,9 @@ def __init__( # pylint: disable=too-many-arguments gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, poll_sleep: int = 10, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.py_file = py_file self.job_name = job_name diff --git a/airflow/providers/google/cloud/operators/datafusion.py b/airflow/providers/google/cloud/operators/datafusion.py index 093721d55ec11..4be569f0b1fd7 100644 --- a/airflow/providers/google/cloud/operators/datafusion.py +++ b/airflow/providers/google/cloud/operators/datafusion.py @@ -57,17 +57,16 @@ class CloudDataFusionRestartInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, instance_name: str, location: str, project_id: Optional[str] = None, api_version: str = "v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.instance_name = instance_name self.location = location self.project_id = project_id @@ -118,17 +117,16 @@ class CloudDataFusionDeleteInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, instance_name: str, location: str, project_id: Optional[str] = None, api_version: str = "v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.instance_name = instance_name self.location = location self.project_id = project_id @@ -182,7 +180,7 @@ class CloudDataFusionCreateInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, instance_name: str, instance: Dict[str, Any], location: str, @@ -190,10 +188,9 @@ def __init__( api_version: str = "v1beta1", gcp_conn_id: str = 
"google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.instance_name = instance_name self.instance = instance self.location = location @@ -273,7 +270,7 @@ class CloudDataFusionUpdateInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, instance_name: str, instance: Dict[str, Any], update_mask: str, @@ -282,10 +279,9 @@ def __init__( api_version: str = "v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.update_mask = update_mask self.instance_name = instance_name self.instance = instance @@ -340,17 +336,16 @@ class CloudDataFusionGetInstanceOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, instance_name: str, location: str, project_id: Optional[str] = None, api_version: str = "v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.instance_name = instance_name self.location = location self.project_id = project_id @@ -407,7 +402,7 @@ class CloudDataFusionCreatePipelineOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, pipeline_name: str, pipeline: Dict[str, Any], instance_name: str, @@ -417,10 +412,9 @@ def __init__( api_version: str = "v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.pipeline_name = pipeline_name self.pipeline = pipeline self.namespace = namespace @@ -486,7 +480,7 @@ class CloudDataFusionDeletePipelineOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, pipeline_name: str, instance_name: str, location: str, @@ -496,10 +490,9 @@ def __init__( api_version: str = 
"v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.pipeline_name = pipeline_name self.version_id = version_id self.namespace = namespace @@ -566,7 +559,7 @@ class CloudDataFusionListPipelinesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, instance_name: str, location: str, artifact_name: Optional[str] = None, @@ -576,10 +569,9 @@ def __init__( api_version: str = "v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.artifact_version = artifact_version self.artifact_name = artifact_name self.namespace = namespace @@ -652,7 +644,7 @@ class CloudDataFusionStartPipelineOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, pipeline_name: str, instance_name: str, location: str, @@ -664,10 +656,9 @@ def __init__( # pylint: disable=too-many-arguments api_version: str = "v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.pipeline_name = pipeline_name self.success_states = success_states self.runtime_args = runtime_args @@ -743,7 +734,7 @@ class CloudDataFusionStopPipelineOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, pipeline_name: str, instance_name: str, location: str, @@ -752,10 +743,9 @@ def __init__( api_version: str = "v1beta1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.pipeline_name = pipeline_name self.namespace = namespace self.instance_name = instance_name diff --git a/airflow/providers/google/cloud/operators/dataproc.py 
b/airflow/providers/google/cloud/operators/dataproc.py index 565c7e6c87af0..113fe63336f02 100644 --- a/airflow/providers/google/cloud/operators/dataproc.py +++ b/airflow/providers/google/cloud/operators/dataproc.py @@ -31,8 +31,8 @@ from datetime import datetime, timedelta from typing import Dict, List, Optional, Sequence, Set, Tuple, Union -from google.api_core.exceptions import AlreadyExists -from google.api_core.retry import Retry +from google.api_core.exceptions import AlreadyExists, NotFound +from google.api_core.retry import Retry, exponential_sleep_generator from google.cloud.dataproc_v1beta2.types import ( # pylint: disable=no-name-in-module Cluster, Duration, FieldMask, ) @@ -191,7 +191,6 @@ def __init__(self, auto_delete_time: Optional[datetime] = None, auto_delete_ttl: Optional[int] = None, customer_managed_key: Optional[str] = None, - *args, # just in case **kwargs ) -> None: @@ -419,9 +418,14 @@ def make(self): class DataprocCreateClusterOperator(BaseOperator): """ Create a new cluster on Google Cloud Dataproc. The operator will wait until the - creation is successful or an error occurs in the creation process. + creation is successful or an error occurs in the creation process. If the cluster + already exists and ``use_if_exists`` is True then the operator will: - The parameters allow to configure the cluster. Please refer to + - if cluster state is ERROR then delete it if specified and raise error + - if cluster state is CREATING wait for it and then check for ERROR state + - if cluster state is DELETING wait for it and then create new cluster + + Please refer to https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters @@ -437,6 +441,11 @@ class DataprocCreateClusterOperator(BaseOperator): :type project_id: str :param region: leave as 'global', might become relevant in the future. (templated) :type region: str + :param delete_on_error: If true the cluster will be deleted if created with ERROR state. Default + value is true. 
+ :type delete_on_error: bool + :param use_if_exists: If true use existing cluster + :type use_if_exists: bool :param request_id: Optional. A unique id used to identify the request. If the server receives two ``DeleteClusterRequest`` requests with the same id, then the second request will be ignored and the first ``google.longrunning.Operation`` created and stored in the backend is returned. @@ -456,17 +465,21 @@ class DataprocCreateClusterOperator(BaseOperator): template_fields = ('project_id', 'region', 'cluster') @apply_defaults - def __init__(self, - region: str = 'global', - project_id: Optional[str] = None, - cluster: Optional[Dict] = None, - request_id: Optional[str] = None, - retry: Optional[Retry] = None, - timeout: Optional[float] = None, - metadata: Optional[Sequence[Tuple[str, str]]] = None, - gcp_conn_id: str = "google_cloud_default", - *args, - **kwargs) -> None: + def __init__( # pylint: disable=too-many-arguments + self, + *, + region: str = 'global', + project_id: Optional[str] = None, + cluster: Optional[Dict] = None, + request_id: Optional[str] = None, + delete_on_error: bool = True, + use_if_exists: bool = True, + retry: Optional[Retry] = None, + timeout: float = 1 * 60 * 60, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + gcp_conn_id: str = "google_cloud_default", + **kwargs + ) -> None: # TODO: remove one day if cluster is None: warnings.warn( @@ -491,10 +504,13 @@ def __init__(self, if arg in kwargs: del kwargs[arg] - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.cluster = cluster - self.cluster_name = cluster.get('cluster_name') + try: + self.cluster_name = cluster['cluster_name'] + except KeyError: + raise AirflowException("`config` misses `cluster_name` key") self.project_id = project_id self.region = region self.request_id = request_id @@ -502,32 +518,113 @@ def __init__(self, self.timeout = timeout self.metadata = metadata self.gcp_conn_id = gcp_conn_id + self.delete_on_error = delete_on_error + 
self.use_if_exists = use_if_exists + + def _create_cluster(self, hook): + operation = hook.create_cluster( + project_id=self.project_id, + region=self.region, + cluster=self.cluster, + request_id=self.request_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + cluster = operation.result() + self.log.info("Cluster created.") + return cluster + + def _delete_cluster(self, hook): + self.log.info("Deleting the cluster") + hook.delete_cluster( + region=self.region, + cluster_name=self.cluster_name, + project_id=self.project_id, + ) + + def _get_cluster(self, hook: DataprocHook): + return hook.get_cluster( + project_id=self.project_id, + region=self.region, + cluster_name=self.cluster_name, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + + def _handle_error_state(self, hook: DataprocHook, cluster: Cluster) -> None: + if cluster.status.state != cluster.status.ERROR: + return + self.log.info("Cluster is in ERROR state") + gcs_uri = hook.diagnose_cluster( + region=self.region, + cluster_name=self.cluster_name, + project_id=self.project_id, + ) + self.log.info( + 'Diagnostic information for cluster %s available at: %s', + self.cluster_name, gcs_uri + ) + if self.delete_on_error: + self._delete_cluster(hook) + raise AirflowException("Cluster was created but was in ERROR state.") + raise AirflowException("Cluster was created but is in ERROR state") + + def _wait_for_cluster_in_deleting_state(self, hook: DataprocHook) -> None: + time_left = self.timeout + for time_to_sleep in exponential_sleep_generator(initial=10, maximum=120): + if time_left < 0: + raise AirflowException( + f"Cluster {self.cluster_name} is still DELETING state, aborting" + ) + time.sleep(time_to_sleep) + time_left = time_left - time_to_sleep + try: + self._get_cluster(hook) + except NotFound: + break + + def _wait_for_cluster_in_creating_state(self, hook: DataprocHook) -> Cluster: + time_left = self.timeout + cluster = self._get_cluster(hook) + for 
time_to_sleep in exponential_sleep_generator(initial=10, maximum=120): + if cluster.status.state != cluster.status.CREATING: + break + if time_left < 0: + raise AirflowException( + f"Cluster {self.cluster_name} is still CREATING state, aborting" + ) + time.sleep(time_to_sleep) + time_left = time_left - time_to_sleep + cluster = self._get_cluster(hook) + return cluster def execute(self, context): self.log.info('Creating cluster: %s', self.cluster_name) hook = DataprocHook(gcp_conn_id=self.gcp_conn_id) try: - operation = hook.create_cluster( - project_id=self.project_id, - region=self.region, - cluster=self.cluster, - request_id=self.request_id, - retry=self.retry, - timeout=self.timeout, - metadata=self.metadata, - ) - cluster = operation.result() - self.log.info("Cluster created.") + # First try to create a new cluster + cluster = self._create_cluster(hook) except AlreadyExists: - cluster = hook.get_cluster( - project_id=self.project_id, - region=self.region, - cluster_name=self.cluster_name, - retry=self.retry, - timeout=self.timeout, - metadata=self.metadata, - ) + if not self.use_if_exists: + raise self.log.info("Cluster already exists.") + cluster = self._get_cluster(hook) + + # Check if cluster is not in ERROR state + self._handle_error_state(hook, cluster) + if cluster.status.state == cluster.status.CREATING: + # Wait for cluster to be created + cluster = self._wait_for_cluster_in_creating_state(hook) + self._handle_error_state(hook, cluster) + elif cluster.status.state == cluster.status.DELETING: + # Wait for cluster to be deleted + self._wait_for_cluster_in_deleting_state(hook) + # Create new cluster + cluster = self._create_cluster(hook) + self._handle_error_state(hook, cluster) + return MessageToDict(cluster) @@ -572,7 +669,7 @@ class DataprocScaleClusterOperator(BaseOperator): template_fields = ['cluster_name', 'project_id', 'region'] @apply_defaults - def __init__(self, + def __init__(self, *, cluster_name: str, project_id: Optional[str] = None, 
region: str = 'global', @@ -580,9 +677,8 @@ def __init__(self, num_preemptible_workers: int = 0, graceful_decommission_timeout: Optional[str] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.region = region self.cluster_name = cluster_name @@ -698,7 +794,7 @@ class DataprocDeleteClusterOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: str, region: str, cluster_name: str, @@ -708,10 +804,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.region = region self.cluster_name = cluster_name @@ -783,7 +878,7 @@ class DataprocJobBaseOperator(BaseOperator): job_type = "" @apply_defaults - def __init__(self, + def __init__(self, *, job_name: str = '{{task.task_id}}_{{ds_nodash}}', cluster_name: str = "cluster-1", dataproc_properties: Optional[Dict] = None, @@ -793,9 +888,8 @@ def __init__(self, labels: Optional[Dict] = None, region: str = 'global', job_error_states: Optional[Set[str]] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.gcp_conn_id = gcp_conn_id self.delegate_to = delegate_to self.labels = labels @@ -916,11 +1010,10 @@ class DataprocSubmitPigJobOperator(DataprocJobBaseOperator): @apply_defaults def __init__( - self, + self, *, query: Optional[str] = None, query_uri: Optional[str] = None, variables: Optional[Dict] = None, - *args, **kwargs ) -> None: # TODO: Remove one day @@ -932,7 +1025,7 @@ def __init__( stacklevel=1 ) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.query = query self.query_uri = query_uri self.variables = variables @@ -982,11 +1075,10 @@ class 
DataprocSubmitHiveJobOperator(DataprocJobBaseOperator): @apply_defaults def __init__( - self, + self, *, query: Optional[str] = None, query_uri: Optional[str] = None, variables: Optional[Dict] = None, - *args, **kwargs ) -> None: # TODO: Remove one day @@ -998,7 +1090,7 @@ def __init__( stacklevel=1 ) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.query = query self.query_uri = query_uri self.variables = variables @@ -1048,11 +1140,10 @@ class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator): @apply_defaults def __init__( - self, + self, *, query: Optional[str] = None, query_uri: Optional[str] = None, variables: Optional[Dict] = None, - *args, **kwargs ) -> None: # TODO: Remove one day @@ -1064,7 +1155,7 @@ def __init__( stacklevel=1 ) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.query = query self.query_uri = query_uri self.variables = variables @@ -1121,13 +1212,12 @@ class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator): @apply_defaults def __init__( - self, + self, *, main_jar: Optional[str] = None, main_class: Optional[str] = None, arguments: Optional[List] = None, archives: Optional[List] = None, files: Optional[List] = None, - *args, **kwargs ) -> None: # TODO: Remove one day @@ -1139,7 +1229,7 @@ def __init__( stacklevel=1 ) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.main_jar = main_jar self.main_class = main_class self.arguments = arguments @@ -1194,13 +1284,12 @@ class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator): @apply_defaults def __init__( - self, + self, *, main_jar: Optional[str] = None, main_class: Optional[str] = None, arguments: Optional[List] = None, archives: Optional[List] = None, files: Optional[List] = None, - *args, **kwargs ) -> None: # TODO: Remove one day @@ -1212,7 +1301,7 @@ def __init__( stacklevel=1 ) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.main_jar = main_jar self.main_class = main_class 
self.arguments = arguments @@ -1294,13 +1383,12 @@ def _upload_file_temp(self, bucket, local_file): @apply_defaults def __init__( - self, + self, *, main: str, arguments: Optional[List] = None, archives: Optional[List] = None, pyfiles: Optional[List] = None, files: Optional[List] = None, - *args, **kwargs ) -> None: # TODO: Remove one day @@ -1312,7 +1400,7 @@ def __init__( stacklevel=1 ) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.main = main self.arguments = arguments self.archives = archives @@ -1413,7 +1501,7 @@ class DataprocInstantiateWorkflowTemplateOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, template_id: str, region: str, project_id: Optional[str] = None, @@ -1424,10 +1512,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template_id = template_id self.parameters = parameters @@ -1504,7 +1591,7 @@ class DataprocInstantiateInlineWorkflowTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, template: Dict, region: str, project_id: Optional[str] = None, @@ -1513,10 +1600,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template = template self.project_id = project_id self.location = region @@ -1576,7 +1662,7 @@ class DataprocSubmitJobOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: str, location: str, job: Dict, @@ -1585,10 +1671,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - 
*args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.location = location self.job = job @@ -1665,7 +1750,7 @@ class DataprocUpdateClusterOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, location: str, cluster_name: str, cluster: Union[Dict, Cluster], @@ -1677,10 +1762,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.location = location self.cluster_name = cluster_name diff --git a/airflow/providers/google/cloud/operators/datastore.py b/airflow/providers/google/cloud/operators/datastore.py index 4e8dceea2f385..c0c9508f5aae8 100644 --- a/airflow/providers/google/cloud/operators/datastore.py +++ b/airflow/providers/google/cloud/operators/datastore.py @@ -19,7 +19,7 @@ """ This module contains Google Datastore operators. """ -from typing import Optional +from typing import Any, Dict, List, Optional from airflow.exceptions import AirflowException from airflow.models import BaseOperator @@ -32,6 +32,10 @@ class CloudDatastoreExportEntitiesOperator(BaseOperator): """ Export entities from Google Cloud Datastore to Cloud Storage + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudDatastoreExportEntitiesOperator` + :param bucket: name of the cloud storage bucket to backup data :type bucket: str :param namespace: optional namespace path in the specified Cloud Storage bucket @@ -63,6 +67,7 @@ class CloudDatastoreExportEntitiesOperator(BaseOperator): @apply_defaults def __init__(self, # pylint: disable=too-many-arguments + *, bucket: str, namespace: Optional[str] = None, datastore_conn_id: str = 'google_cloud_default', @@ -73,9 +78,8 @@ def __init__(self, # pylint: disable=too-many-arguments polling_interval_in_seconds: int = 10, overwrite_existing: bool = False, project_id: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.datastore_conn_id = datastore_conn_id self.cloud_storage_conn_id = cloud_storage_conn_id self.delegate_to = delegate_to @@ -119,6 +123,10 @@ class CloudDatastoreImportEntitiesOperator(BaseOperator): """ Import entities from Cloud Storage to Google Cloud Datastore + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudDatastoreImportEntitiesOperator` + :param bucket: container in Cloud Storage to store data :type bucket: str :param file: path of the backup metadata file in the specified Cloud Storage bucket. 
@@ -148,6 +156,7 @@ class CloudDatastoreImportEntitiesOperator(BaseOperator): @apply_defaults def __init__(self, + *, bucket: str, file: str, namespace: Optional[str] = None, @@ -157,9 +166,8 @@ def __init__(self, delegate_to: Optional[str] = None, polling_interval_in_seconds: float = 10, project_id: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.datastore_conn_id = datastore_conn_id self.delegate_to = delegate_to self.bucket = bucket @@ -191,3 +199,322 @@ def execute(self, context): raise AirflowException('Operation failed: result={}'.format(result)) return result + + +class CloudDatastoreAllocateIdsOperator(BaseOperator): + """ + Allocate IDs for incomplete keys. Return list of keys. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudDatastoreAllocateIdsOperator` + + .. seealso:: + https://cloud.google.com/datastore/docs/reference/rest/v1/projects/allocateIds + + :param partial_keys: a list of partial keys. + :type partial_keys: list + :param project_id: Google Cloud Platform project ID against which to make the request. + :type project_id: str + :param delegate_to: The account to impersonate, if any. + For this to work, the service account making the request must have domain-wide + delegation enabled. + :type delegate_to: str + :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. 
+ :type gcp_conn_id: str + """ + template_fields = ("partial_keys",) + + @apply_defaults + def __init__( + self, *, + partial_keys: List, + project_id: Optional[str] = None, + delegate_to: Optional[str] = None, + gcp_conn_id: str = 'google_cloud_default', + **kwargs + ) -> None: + super().__init__(**kwargs) + + self.partial_keys = partial_keys + self.gcp_conn_id = gcp_conn_id + self.project_id = project_id + self.delegate_to = delegate_to + + def execute(self, context): + hook = DatastoreHook(gcp_conn_id=self.gcp_conn_id) + keys = hook.allocate_ids( + partial_keys=self.partial_keys, + project_id=self.project_id, + ) + return keys + + +class CloudDatastoreBeginTransactionOperator(BaseOperator): + """ + Begins a new transaction. Returns a transaction handle. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudDatastoreBeginTransactionOperator` + + .. seealso:: + https://cloud.google.com/datastore/docs/reference/rest/v1/projects/beginTransaction + + :param transaction_options: Options for a new transaction. + :type transaction_options: Dict[str, Any] + :param project_id: Google Cloud Platform project ID against which to make the request. + :type project_id: str + :param delegate_to: The account to impersonate, if any. + For this to work, the service account making the request must have domain-wide + delegation enabled. + :type delegate_to: str + :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. 
+ :type gcp_conn_id: str + """ + template_fields = ("transaction_options",) + + @apply_defaults + def __init__( + self, *, + transaction_options: Dict[str, Any], + project_id: Optional[str] = None, + delegate_to: Optional[str] = None, + gcp_conn_id: str = 'google_cloud_default', + **kwargs + ) -> None: + super().__init__(**kwargs) + + self.transaction_options = transaction_options + self.gcp_conn_id = gcp_conn_id + self.project_id = project_id + self.delegate_to = delegate_to + + def execute(self, context): + hook = DatastoreHook(gcp_conn_id=self.gcp_conn_id) + handle = hook.begin_transaction( + transaction_options=self.transaction_options, + project_id=self.project_id, + ) + return handle + + +class CloudDatastoreCommitOperator(BaseOperator): + """ + Commit a transaction, optionally creating, deleting or modifying some entities. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudDatastoreCommitOperator` + + .. seealso:: + https://cloud.google.com/datastore/docs/reference/rest/v1/projects/commit + + :param body: the body of the commit request. + :type body: dict + :param project_id: Google Cloud Platform project ID against which to make the request. + :type project_id: str + :param delegate_to: The account to impersonate, if any. + For this to work, the service account making the request must have domain-wide + delegation enabled. + :type delegate_to: str + :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. 
+ :type gcp_conn_id: str + """ + template_fields = ("body",) + + @apply_defaults + def __init__( + self, *, + body: Dict[str, Any], + project_id: Optional[str] = None, + delegate_to: Optional[str] = None, + gcp_conn_id: str = 'google_cloud_default', + **kwargs + ) -> None: + super().__init__(**kwargs) + + self.body = body + self.gcp_conn_id = gcp_conn_id + self.project_id = project_id + self.delegate_to = delegate_to + + def execute(self, context): + hook = DatastoreHook(gcp_conn_id=self.gcp_conn_id) + response = hook.commit( + body=self.body, + project_id=self.project_id, + ) + return response + + +class CloudDatastoreRollbackOperator(BaseOperator): + """ + Roll back a transaction. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudDatastoreRollbackOperator` + + .. seealso:: + https://cloud.google.com/datastore/docs/reference/rest/v1/projects/rollback + + :param transaction: the transaction to roll back. + :type transaction: str + :param project_id: Google Cloud Platform project ID against which to make the request. + :type project_id: str + :param delegate_to: The account to impersonate, if any. + For this to work, the service account making the request must have domain-wide + delegation enabled. + :type delegate_to: str + :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. 
+ :type gcp_conn_id: str + """ + template_fields = ("transaction",) + + @apply_defaults + def __init__( + self, *, + transaction: str, + project_id: Optional[str] = None, + delegate_to: Optional[str] = None, + gcp_conn_id: str = 'google_cloud_default', + **kwargs + ) -> None: + super().__init__(**kwargs) + + self.transaction = transaction + self.gcp_conn_id = gcp_conn_id + self.project_id = project_id + self.delegate_to = delegate_to + + def execute(self, context): + hook = DatastoreHook(gcp_conn_id=self.gcp_conn_id) + hook.rollback( + transaction=self.transaction, + project_id=self.project_id, + ) + + +class CloudDatastoreRunQueryOperator(BaseOperator): + """ + Run a query for entities. Returns the batch of query results. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudDatastoreRunQueryOperator` + + .. seealso:: + https://cloud.google.com/datastore/docs/reference/rest/v1/projects/runQuery + + :param body: the body of the query request. + :type body: dict + :param project_id: Google Cloud Platform project ID against which to make the request. + :type project_id: str + :param delegate_to: The account to impersonate, if any. + For this to work, the service account making the request must have domain-wide + delegation enabled. + :type delegate_to: str + :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. 
+ :type gcp_conn_id: str + """ + template_fields = ("body",) + + @apply_defaults + def __init__( + self, *, + body: Dict[str, Any], + project_id: Optional[str] = None, + delegate_to: Optional[str] = None, + gcp_conn_id: str = 'google_cloud_default', + **kwargs + ) -> None: + super().__init__(**kwargs) + + self.body = body + self.gcp_conn_id = gcp_conn_id + self.project_id = project_id + self.delegate_to = delegate_to + + def execute(self, context): + hook = DatastoreHook(gcp_conn_id=self.gcp_conn_id) + response = hook.run_query( + body=self.body, + project_id=self.project_id, + ) + return response + + +class CloudDatastoreGetOperationOperator(BaseOperator): + """ + Gets the latest state of a long-running operation. + + .. seealso:: + https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/get + + :param name: the name of the operation resource. + :type name: str + :param delegate_to: The account to impersonate, if any. + For this to work, the service account making the request must have domain-wide + delegation enabled. + :type delegate_to: str + :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. + :type gcp_conn_id: str + """ + template_fields = ("name",) + + @apply_defaults + def __init__( + self, *, + name: str, + delegate_to: Optional[str] = None, + gcp_conn_id: str = 'google_cloud_default', + **kwargs + ) -> None: + super().__init__(**kwargs) + + self.name = name + self.gcp_conn_id = gcp_conn_id + self.delegate_to = delegate_to + + def execute(self, context): + hook = DatastoreHook(gcp_conn_id=self.gcp_conn_id) + op = hook.get_operation(name=self.name) + return op + + +class CloudDatastoreDeleteOperationOperator(BaseOperator): + """ + Deletes the long-running operation. + + .. seealso:: + https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/delete + + :param name: the name of the operation resource. + :type name: str + :param delegate_to: The account to impersonate, if any. 
+ For this to work, the service account making the request must have domain-wide + delegation enabled. + :type delegate_to: str + :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. + :type gcp_conn_id: str + """ + template_fields = ("name",) + + @apply_defaults + def __init__( + self, *, + name: str, + delegate_to: Optional[str] = None, + gcp_conn_id: str = 'google_cloud_default', + **kwargs + ) -> None: + super().__init__(**kwargs) + + self.name = name + self.gcp_conn_id = gcp_conn_id + self.delegate_to = delegate_to + + def execute(self, context): + hook = DatastoreHook(gcp_conn_id=self.gcp_conn_id) + hook.delete_operation(name=self.name) diff --git a/airflow/providers/google/cloud/operators/dlp.py b/airflow/providers/google/cloud/operators/dlp.py index 9617e8ede494e..7df731e557958 100644 --- a/airflow/providers/google/cloud/operators/dlp.py +++ b/airflow/providers/google/cloud/operators/dlp.py @@ -65,17 +65,16 @@ class CloudDLPCancelDLPJobOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dlp_job_id: str, project_id: Optional[str] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dlp_job_id = dlp_job_id self.project_id = project_id self.retry = retry @@ -134,7 +133,7 @@ class CloudDLPCreateDeidentifyTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, organization_id: Optional[str] = None, project_id: Optional[str] = None, deidentify_template: Optional[Union[Dict, DeidentifyTemplate]] = None, @@ -143,10 +142,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.organization_id = 
organization_id self.project_id = project_id self.deidentify_template = deidentify_template @@ -216,7 +214,7 @@ class CloudDLPCreateDLPJobOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: Optional[str] = None, inspect_job: Optional[Union[Dict, InspectJobConfig]] = None, risk_job: Optional[Union[Dict, RiskAnalysisJobConfig]] = None, @@ -226,10 +224,9 @@ def __init__( metadata: Optional[Sequence[Tuple[str, str]]] = None, wait_until_finished: bool = True, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.inspect_job = inspect_job self.risk_job = risk_job @@ -304,7 +301,7 @@ class CloudDLPCreateInspectTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, organization_id: Optional[str] = None, project_id: Optional[str] = None, inspect_template: Optional[InspectTemplate] = None, @@ -313,10 +310,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.organization_id = organization_id self.project_id = project_id self.inspect_template = inspect_template @@ -381,7 +377,7 @@ class CloudDLPCreateJobTriggerOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: Optional[str] = None, job_trigger: Optional[Union[Dict, JobTrigger]] = None, trigger_id: Optional[str] = None, @@ -389,10 +385,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.job_trigger = job_trigger self.trigger_id = trigger_id @@ -462,7 +457,7 @@ class 
CloudDLPCreateStoredInfoTypeOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, organization_id: Optional[str] = None, project_id: Optional[str] = None, config: Optional[StoredInfoTypeConfig] = None, @@ -471,10 +466,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.organization_id = organization_id self.project_id = project_id self.config = config @@ -559,7 +553,7 @@ class CloudDLPDeidentifyContentOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: Optional[str] = None, deidentify_config: Optional[Union[Dict, DeidentifyConfig]] = None, inspect_config: Optional[Union[Dict, InspectConfig]] = None, @@ -570,10 +564,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.deidentify_config = deidentify_config self.inspect_config = inspect_config @@ -631,7 +624,7 @@ class CloudDLPDeleteDeidentifyTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, template_id: str, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -639,10 +632,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template_id = template_id self.organization_id = organization_id self.project_id = project_id @@ -694,17 +686,16 @@ class CloudDLPDeleteDLPJobOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dlp_job_id: str, project_id: Optional[str] = None, retry: Optional[Retry] = 
None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dlp_job_id = dlp_job_id self.project_id = project_id self.retry = retry @@ -756,7 +747,7 @@ class CloudDLPDeleteInspectTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, template_id: str, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -764,10 +755,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template_id = template_id self.organization_id = organization_id self.project_id = project_id @@ -818,17 +808,16 @@ class CloudDLPDeleteJobTriggerOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, job_trigger_id: str, project_id: Optional[str] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_trigger_id = job_trigger_id self.project_id = project_id self.retry = retry @@ -885,7 +874,7 @@ class CloudDLPDeleteStoredInfoTypeOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, stored_info_type_id: str, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -893,10 +882,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.stored_info_type_id = stored_info_type_id self.organization_id = organization_id self.project_id = 
project_id @@ -951,7 +939,7 @@ class CloudDLPGetDeidentifyTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, template_id: str, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -959,10 +947,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template_id = template_id self.organization_id = organization_id self.project_id = project_id @@ -1012,17 +999,16 @@ class CloudDLPGetDLPJobOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, dlp_job_id: str, project_id: Optional[str] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.dlp_job_id = dlp_job_id self.project_id = project_id self.retry = retry @@ -1073,7 +1059,7 @@ class CloudDLPGetInspectTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, template_id: str, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -1081,10 +1067,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template_id = template_id self.organization_id = organization_id self.project_id = project_id @@ -1134,17 +1119,16 @@ class CloudDLPGetDLPJobTriggerOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, job_trigger_id: str, project_id: Optional[str] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = 
"google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_trigger_id = job_trigger_id self.project_id = project_id self.retry = retry @@ -1200,7 +1184,7 @@ class CloudDLPGetStoredInfoTypeOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, stored_info_type_id: str, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -1208,10 +1192,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.stored_info_type_id = stored_info_type_id self.organization_id = organization_id self.project_id = project_id @@ -1274,7 +1257,7 @@ class CloudDLPInspectContentOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: Optional[str] = None, inspect_config: Optional[Union[Dict, InspectConfig]] = None, item: Optional[Union[Dict, ContentItem]] = None, @@ -1283,10 +1266,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.inspect_config = inspect_config self.item = item @@ -1345,7 +1327,7 @@ class CloudDLPListDeidentifyTemplatesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, organization_id: Optional[str] = None, project_id: Optional[str] = None, page_size: Optional[int] = None, @@ -1354,10 +1336,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.organization_id = organization_id self.project_id = project_id self.page_size = 
page_size @@ -1417,7 +1398,7 @@ class CloudDLPListDLPJobsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: Optional[str] = None, results_filter: Optional[str] = None, page_size: Optional[int] = None, @@ -1427,10 +1408,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.results_filter = results_filter self.page_size = page_size @@ -1484,17 +1464,16 @@ class CloudDLPListInfoTypesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, language_code: Optional[str] = None, results_filter: Optional[str] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.language_code = language_code self.results_filter = results_filter self.retry = retry @@ -1549,7 +1528,7 @@ class CloudDLPListInspectTemplatesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, organization_id: Optional[str] = None, project_id: Optional[str] = None, page_size: Optional[int] = None, @@ -1558,10 +1537,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.organization_id = organization_id self.project_id = project_id self.page_size = page_size @@ -1619,7 +1597,7 @@ class CloudDLPListJobTriggersOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: Optional[str] = None, page_size: Optional[int] = None, order_by: Optional[str] = None, @@ -1628,10 +1606,9 @@ def __init__( timeout: 
Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.page_size = page_size self.order_by = order_by @@ -1690,7 +1667,7 @@ class CloudDLPListStoredInfoTypesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, organization_id: Optional[str] = None, project_id: Optional[str] = None, page_size: Optional[int] = None, @@ -1699,10 +1676,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.organization_id = organization_id self.project_id = project_id self.page_size = page_size @@ -1772,7 +1748,7 @@ class CloudDLPRedactImageOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: Optional[str] = None, inspect_config: Optional[Union[Dict, InspectConfig]] = None, image_redaction_configs: Optional[Union[Dict, RedactImageRequest.ImageRedactionConfig]] = None, @@ -1782,10 +1758,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.inspect_config = inspect_config self.image_redaction_configs = image_redaction_configs @@ -1859,7 +1834,7 @@ class CloudDLPReidentifyContentOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, project_id: Optional[str] = None, reidentify_config: Optional[Union[Dict, DeidentifyConfig]] = None, inspect_config: Optional[Union[Dict, InspectConfig]] = None, @@ -1870,10 +1845,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, 
gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.reidentify_config = reidentify_config self.inspect_config = inspect_config @@ -1943,7 +1917,7 @@ class CloudDLPUpdateDeidentifyTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, template_id: str, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -1953,10 +1927,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template_id = template_id self.organization_id = organization_id self.project_id = project_id @@ -2024,7 +1997,7 @@ class CloudDLPUpdateInspectTemplateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, template_id: str, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -2034,10 +2007,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.template_id = template_id self.organization_id = organization_id self.project_id = project_id @@ -2101,7 +2073,7 @@ class CloudDLPUpdateJobTriggerOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, job_trigger_id, project_id: Optional[str] = None, job_trigger: Optional[JobTrigger] = None, @@ -2110,10 +2082,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_trigger_id = job_trigger_id self.project_id = project_id self.job_trigger = job_trigger @@ -2180,7 
+2151,7 @@ class CloudDLPUpdateStoredInfoTypeOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, stored_info_type_id, organization_id: Optional[str] = None, project_id: Optional[str] = None, @@ -2190,10 +2161,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.stored_info_type_id = stored_info_type_id self.organization_id = organization_id self.project_id = project_id diff --git a/airflow/providers/google/cloud/operators/functions.py b/airflow/providers/google/cloud/operators/functions.py index 152ba3f69dc07..245205a25f402 100644 --- a/airflow/providers/google/cloud/operators/functions.py +++ b/airflow/providers/google/cloud/operators/functions.py @@ -122,7 +122,7 @@ class CloudFunctionDeployFunctionOperator(BaseOperator): # [END gcf_function_deploy_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, location: str, body: Dict, project_id: Optional[str] = None, @@ -130,7 +130,7 @@ def __init__(self, api_version: str = 'v1', zip_path: Optional[str] = None, validate_body: bool = True, - *args, **kwargs) -> None: + **kwargs) -> None: self.project_id = project_id self.location = location self.body = body @@ -143,7 +143,7 @@ def __init__(self, self._field_validator = GcpBodyFieldValidator(CLOUD_FUNCTION_VALIDATION, api_version=api_version) self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if not self.location: @@ -314,16 +314,16 @@ class CloudFunctionDeleteFunctionOperator(BaseOperator): # [END gcf_function_delete_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, name: str, gcp_conn_id: str = 'google_cloud_default', api_version: str = 'v1', - *args, **kwargs) -> None: + **kwargs) -> None: self.name = name self.gcp_conn_id = gcp_conn_id 
self.api_version = api_version self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if not self.name: @@ -371,17 +371,16 @@ class CloudFunctionInvokeFunctionOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, function_id: str, input_data: Dict, location: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', api_version: str = 'v1', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.function_id = function_id self.input_data = input_data self.location = location diff --git a/airflow/providers/google/cloud/operators/gcs.py b/airflow/providers/google/cloud/operators/gcs.py index fa24ad48fa74a..ddfb13d7df356 100644 --- a/airflow/providers/google/cloud/operators/gcs.py +++ b/airflow/providers/google/cloud/operators/gcs.py @@ -100,7 +100,7 @@ class GCSCreateBucketOperator(BaseOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, + def __init__(self, *, bucket_name: str, resource: Optional[Dict] = None, storage_class: str = 'MULTI_REGIONAL', @@ -110,9 +110,8 @@ def __init__(self, gcp_conn_id: str = 'google_cloud_default', google_cloud_storage_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if google_cloud_storage_conn_id: warnings.warn( @@ -188,16 +187,15 @@ class GCSListObjectsOperator(BaseOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, + def __init__(self, *, bucket: str, prefix: Optional[str] = None, delimiter: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', google_cloud_storage_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if google_cloud_storage_conn_id: warnings.warn( @@ -253,14 +251,14 @@ class GCSDeleteObjectsOperator(BaseOperator): 
template_fields = ('bucket_name', 'prefix', 'objects') @apply_defaults - def __init__(self, + def __init__(self, *, bucket_name: str, objects: Optional[Iterable[str]] = None, prefix: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', google_cloud_storage_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs) -> None: + **kwargs) -> None: if google_cloud_storage_conn_id: warnings.warn( @@ -277,7 +275,7 @@ def __init__(self, if not objects and not prefix: raise ValueError("Either object or prefix should be set. Both are None") - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): hook = GCSHook( @@ -330,17 +328,16 @@ class GCSBucketCreateAclEntryOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, bucket: str, entity: str, role: str, user_project: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', google_cloud_storage_conn_id: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if google_cloud_storage_conn_id: warnings.warn( @@ -399,7 +396,7 @@ class GCSObjectCreateAclEntryOperator(BaseOperator): # [END gcs_object_create_acl_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, bucket: str, object_name: str, entity: str, @@ -408,8 +405,8 @@ def __init__(self, user_project: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', google_cloud_storage_conn_id: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) if google_cloud_storage_conn_id: warnings.warn( @@ -466,17 +463,16 @@ class GCSFileTransformOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, source_bucket: str, source_object: str, transform_script: Union[str, List[str]], destination_bucket: Optional[str] = None, destination_object: Optional[str] = None, gcp_conn_id: str = "google_cloud_default", 
- *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.source_bucket = source_bucket self.source_object = source_object self.destination_bucket = destination_bucket or self.source_bucket @@ -552,12 +548,12 @@ class GCSDeleteBucketOperator(BaseOperator): template_fields = ('bucket_name', "gcp_conn_id") @apply_defaults - def __init__(self, + def __init__(self, *, bucket_name: str, force: bool = True, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.bucket_name = bucket_name self.force: bool = force @@ -619,7 +615,7 @@ class GCSSynchronizeBucketsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, source_bucket: str, destination_bucket: str, source_object: Optional[str] = None, @@ -629,10 +625,9 @@ def __init__( allow_overwrite: bool = False, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.source_bucket = source_bucket self.destination_bucket = destination_bucket self.source_object = source_object diff --git a/airflow/providers/google/cloud/operators/kubernetes_engine.py b/airflow/providers/google/cloud/operators/kubernetes_engine.py index f97593f794a41..a9aed1e29031d 100644 --- a/airflow/providers/google/cloud/operators/kubernetes_engine.py +++ b/airflow/providers/google/cloud/operators/kubernetes_engine.py @@ -74,14 +74,14 @@ class GKEDeleteClusterOperator(BaseOperator): @apply_defaults def __init__(self, + *, name: str, location: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', api_version: str = 'v2', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.gcp_conn_id = gcp_conn_id @@ -153,14 +153,14 @@ class GKECreateClusterOperator(BaseOperator): 
@apply_defaults def __init__(self, + *, location: str, body: Optional[Union[Dict, Cluster]], project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', api_version: str = 'v2', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.gcp_conn_id = gcp_conn_id @@ -226,6 +226,7 @@ class GKEStartPodOperator(KubernetesPodOperator): @apply_defaults def __init__(self, + *, location: str, cluster_name: str, use_internal_ip: bool = False, diff --git a/airflow/providers/google/cloud/operators/life_sciences.py b/airflow/providers/google/cloud/operators/life_sciences.py index 92956737aa02f..e176c06a683e8 100644 --- a/airflow/providers/google/cloud/operators/life_sciences.py +++ b/airflow/providers/google/cloud/operators/life_sciences.py @@ -50,13 +50,14 @@ class LifeSciencesRunPipelineOperator(BaseOperator): @apply_defaults def __init__(self, + *, body: dict, location: str, project_id: Optional[str] = None, gcp_conn_id: str = "google_cloud_default", api_version: str = "v2beta", - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.body = body self.location = location self.project_id = project_id diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/airflow/providers/google/cloud/operators/mlengine.py index 623c14d540c2c..a25fef8ba1822 100644 --- a/airflow/providers/google/cloud/operators/mlengine.py +++ b/airflow/providers/google/cloud/operators/mlengine.py @@ -168,6 +168,7 @@ class MLEngineStartBatchPredictionJobOperator(BaseOperator): @apply_defaults def __init__(self, # pylint: disable=too-many-arguments + *, job_id: str, region: str, data_format: str, @@ -182,9 +183,8 @@ def __init__(self, # pylint: disable=too-many-arguments project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + 
super().__init__(**kwargs) self._project_id = project_id self._job_id = job_id @@ -318,14 +318,14 @@ class MLEngineManageModelOperator(BaseOperator): @apply_defaults def __init__(self, + *, model: dict, operation: str = 'create', project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) warnings.warn( "This operator is deprecated. Consider using operators for specific operations: " @@ -381,13 +381,13 @@ class MLEngineCreateModelOperator(BaseOperator): @apply_defaults def __init__(self, + *, model: dict, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._model = model self._gcp_conn_id = gcp_conn_id @@ -429,13 +429,13 @@ class MLEngineGetModelOperator(BaseOperator): @apply_defaults def __init__(self, + *, model_name: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._model_name = model_name self._gcp_conn_id = gcp_conn_id @@ -480,14 +480,14 @@ class MLEngineDeleteModelOperator(BaseOperator): @apply_defaults def __init__(self, + *, model_name: str, delete_contents: bool = False, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._model_name = model_name self._delete_contents = delete_contents @@ -565,6 +565,7 @@ class MLEngineManageVersionOperator(BaseOperator): @apply_defaults def __init__(self, + *, model_name: str, version_name: Optional[str] = None, 
version: Optional[dict] = None, @@ -572,9 +573,8 @@ def __init__(self, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._model_name = model_name self._version_name = version_name @@ -661,15 +661,15 @@ class MLEngineCreateVersionOperator(BaseOperator): @apply_defaults def __init__(self, + *, model_name: str, version: dict, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._model_name = model_name self._version = version @@ -728,15 +728,15 @@ class MLEngineSetDefaultVersionOperator(BaseOperator): @apply_defaults def __init__(self, + *, model_name: str, version_name: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._model_name = model_name self._version_name = version_name @@ -791,14 +791,14 @@ class MLEngineListVersionsOperator(BaseOperator): @apply_defaults def __init__(self, + *, model_name: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._model_name = model_name self._gcp_conn_id = gcp_conn_id @@ -852,15 +852,15 @@ class MLEngineDeleteVersionOperator(BaseOperator): @apply_defaults def __init__(self, + *, model_name: str, version_name: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, 
**kwargs) + super().__init__(**kwargs) self._project_id = project_id self._model_name = model_name self._version_name = version_name @@ -975,6 +975,7 @@ class MLEngineStartTrainingJobOperator(BaseOperator): @apply_defaults def __init__(self, # pylint: disable=too-many-arguments + *, job_id: str, package_uris: List[str], training_python_module: str, @@ -989,9 +990,8 @@ def __init__(self, # pylint: disable=too-many-arguments gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, mode: str = 'PRODUCTION', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._job_id = job_id self._package_uris = package_uris @@ -1114,13 +1114,13 @@ class MLEngineTrainingCancelJobOperator(BaseOperator): @apply_defaults def __init__(self, + *, job_id: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._project_id = project_id self._job_id = job_id self._gcp_conn_id = gcp_conn_id diff --git a/airflow/providers/google/cloud/operators/natural_language.py b/airflow/providers/google/cloud/operators/natural_language.py index fae7b04d46572..a314dde020dc2 100644 --- a/airflow/providers/google/cloud/operators/natural_language.py +++ b/airflow/providers/google/cloud/operators/natural_language.py @@ -27,6 +27,7 @@ from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.natural_language import CloudNaturalLanguageHook +from airflow.utils.decorators import apply_defaults MetaData = Sequence[Tuple[str, str]] @@ -59,18 +60,18 @@ class CloudNaturalLanguageAnalyzeEntitiesOperator(BaseOperator): template_fields = ("document", "gcp_conn_id") # [END natural_language_analyze_entities_template_fields] + @apply_defaults def __init__( - self, + self, *, document: Union[dict, Document], encoding_type: Optional[enums.EncodingType] 
= None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.document = document self.encoding_type = encoding_type self.retry = retry @@ -119,18 +120,18 @@ class CloudNaturalLanguageAnalyzeEntitySentimentOperator(BaseOperator): template_fields = ("document", "gcp_conn_id") # [END natural_language_analyze_entity_sentiment_template_fields] + @apply_defaults def __init__( - self, + self, *, document: Union[dict, Document], encoding_type: Optional[enums.EncodingType] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.document = document self.encoding_type = encoding_type self.retry = retry @@ -182,18 +183,18 @@ class CloudNaturalLanguageAnalyzeSentimentOperator(BaseOperator): template_fields = ("document", "gcp_conn_id") # [END natural_language_analyze_sentiment_template_fields] + @apply_defaults def __init__( - self, + self, *, document: Union[dict, Document], encoding_type: Optional[enums.EncodingType] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.document = document self.encoding_type = encoding_type self.retry = retry @@ -238,17 +239,17 @@ class CloudNaturalLanguageClassifyTextOperator(BaseOperator): template_fields = ("document", "gcp_conn_id") # [END natural_language_classify_text_template_fields] + @apply_defaults def __init__( - self, + self, *, document: Union[dict, Document], retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[MetaData] = None, 
gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.document = document self.retry = retry self.timeout = timeout diff --git a/airflow/providers/google/cloud/operators/pubsub.py b/airflow/providers/google/cloud/operators/pubsub.py index 233d3fb79c4e6..2ec8b08ba3b38 100644 --- a/airflow/providers/google/cloud/operators/pubsub.py +++ b/airflow/providers/google/cloud/operators/pubsub.py @@ -22,7 +22,10 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union from google.api_core.retry import Retry -from google.cloud.pubsub_v1.types import Duration, MessageStoragePolicy, PushConfig, ReceivedMessage +from google.cloud.pubsub_v1.types import ( + DeadLetterPolicy, Duration, ExpirationPolicy, MessageStoragePolicy, PushConfig, ReceivedMessage, + RetryPolicy, +) from google.protobuf.json_format import MessageToDict from airflow.models import BaseOperator @@ -109,22 +112,21 @@ class PubSubCreateTopicOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, - topic: str, - project_id: Optional[str] = None, - fail_if_exists: bool = False, - gcp_conn_id: str = 'google_cloud_default', - delegate_to: Optional[str] = None, - labels: Optional[Dict[str, str]] = None, - message_storage_policy: Union[Dict, MessageStoragePolicy] = None, - kms_key_name: Optional[str] = None, - retry: Optional[Retry] = None, - timeout: Optional[float] = None, - metadata: Optional[Sequence[Tuple[str, str]]] = None, - project: Optional[str] = None, - *args, - **kwargs) -> None: - + self, *, + topic: str, + project_id: Optional[str] = None, + fail_if_exists: bool = False, + gcp_conn_id: str = 'google_cloud_default', + delegate_to: Optional[str] = None, + labels: Optional[Dict[str, str]] = None, + message_storage_policy: Union[Dict, MessageStoragePolicy] = None, + kms_key_name: Optional[str] = None, + retry: Optional[Retry] = None, + timeout: 
Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + project: Optional[str] = None, + **kwargs + ) -> None: # To preserve backward compatibility # TODO: remove one day if project: @@ -133,7 +135,7 @@ def __init__( "the project_id parameter.", DeprecationWarning, stacklevel=2) project_id = project - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.topic = topic self.fail_if_exists = fail_if_exists @@ -165,6 +167,7 @@ def execute(self, context): self.log.info("Created topic %s", self.topic) +# pylint: disable=too-many-instance-attributes class PubSubCreateSubscriptionOperator(BaseOperator): """Create a PubSub subscription. @@ -258,6 +261,32 @@ class PubSubCreateSubscriptionOperator(BaseOperator): :param labels: Client-assigned labels; see https://cloud.google.com/pubsub/docs/labels + :type labels: Dict[str, str] + :param enable_message_ordering: If true, messages published with the same + ordering_key in PubsubMessage will be delivered to the subscribers in the order + in which they are received by the Pub/Sub system. Otherwise, they may be + delivered in any order. + :type enable_message_ordering: bool + :param expiration_policy: A policy that specifies the conditions for this + subscription’s expiration. A subscription is considered active as long as any + connected subscriber is successfully consuming messages from the subscription or + is issuing operations on the subscription. If expiration_policy is not set, + a default policy with ttl of 31 days will be used. The minimum allowed value for + expiration_policy.ttl is 1 day. + :type expiration_policy: Union[Dict, google.cloud.pubsub_v1.types.ExpirationPolicy] + :param filter_: An expression written in the Cloud Pub/Sub filter language. If + non-empty, then only PubsubMessages whose attributes field matches the filter are + delivered on this subscription. If empty, then no messages are filtered out.
+ :type filter_: str + :param dead_letter_policy: A policy that specifies the conditions for dead lettering + messages in this subscription. If dead_letter_policy is not set, dead lettering is + disabled. + :type dead_letter_policy: Union[Dict, google.cloud.pubsub_v1.types.DeadLetterPolicy] + :param retry_policy: A policy that specifies how Pub/Sub retries message delivery + for this subscription. If not set, the default retry policy is applied. This + generally implies that messages will be retried as soon as possible for healthy + subscribers. RetryPolicy will be triggered on NACKs or acknowledgement deadline + exceeded events for a given message. + :type retry_policy: Union[Dict, google.cloud.pubsub_v1.types.RetryPolicy] :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry @@ -279,26 +308,31 @@ class PubSubCreateSubscriptionOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, - topic: str, - project_id: Optional[str] = None, - subscription: Optional[str] = None, - subscription_project_id: Optional[str] = None, - ack_deadline_secs: int = 10, - fail_if_exists: bool = False, - gcp_conn_id: str = 'google_cloud_default', - delegate_to: Optional[str] = None, - push_config: Optional[Union[Dict, PushConfig]] = None, - retain_acked_messages: Optional[bool] = None, - message_retention_duration: Optional[Union[Dict, Duration]] = None, - labels: Optional[Dict[str, str]] = None, - retry: Optional[Retry] = None, - timeout: Optional[float] = None, - metadata: Optional[Sequence[Tuple[str, str]]] = None, - topic_project: Optional[str] = None, - subscription_project: Optional[str] = None, - *args, - **kwargs) -> None: + self, *, + topic: str, + project_id: Optional[str] = None, + subscription: Optional[str] = None, + subscription_project_id: Optional[str] = None, + ack_deadline_secs: int = 10, + fail_if_exists: bool = False, + 
gcp_conn_id: str = 'google_cloud_default', + delegate_to: Optional[str] = None, + push_config: Optional[Union[Dict, PushConfig]] = None, + retain_acked_messages: Optional[bool] = None, + message_retention_duration: Optional[Union[Dict, Duration]] = None, + labels: Optional[Dict[str, str]] = None, + enable_message_ordering: bool = False, + expiration_policy: Optional[Union[Dict, ExpirationPolicy]] = None, + filter_: Optional[str] = None, + dead_letter_policy: Optional[Union[Dict, DeadLetterPolicy]] = None, + retry_policy: Optional[Union[Dict, RetryPolicy]] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + topic_project: Optional[str] = None, + subscription_project: Optional[str] = None, + **kwargs + ) -> None: # To preserve backward compatibility # TODO: remove one day @@ -313,7 +347,7 @@ def __init__( "the subscription_project parameter.", DeprecationWarning, stacklevel=2) subscription_project_id = subscription_project - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.topic = topic self.subscription = subscription @@ -326,6 +360,11 @@ def __init__( self.retain_acked_messages = retain_acked_messages self.message_retention_duration = message_retention_duration self.labels = labels + self.enable_message_ordering = enable_message_ordering + self.expiration_policy = expiration_policy + self.filter_ = filter_ + self.dead_letter_policy = dead_letter_policy + self.retry_policy = retry_policy self.retry = retry self.timeout = timeout self.metadata = metadata @@ -346,6 +385,11 @@ def execute(self, context): retain_acked_messages=self.retain_acked_messages, message_retention_duration=self.message_retention_duration, labels=self.labels, + enable_message_ordering=self.enable_message_ordering, + expiration_policy=self.expiration_policy, + filter_=self.filter_, + dead_letter_policy=self.dead_letter_policy, + retry_policy=self.retry_policy, 
retry=self.retry, timeout=self.timeout, metadata=self.metadata @@ -418,19 +462,18 @@ class PubSubDeleteTopicOperator(BaseOperator): @apply_defaults def __init__( - self, - topic: str, - project_id: Optional[str] = None, - fail_if_not_exists: bool = False, - gcp_conn_id: str = 'google_cloud_default', - delegate_to: Optional[str] = None, - retry: Optional[Retry] = None, - timeout: Optional[float] = None, - metadata: Optional[Sequence[Tuple[str, str]]] = None, - project: Optional[str] = None, - *args, - **kwargs) -> None: - + self, *, + topic: str, + project_id: Optional[str] = None, + fail_if_not_exists: bool = False, + gcp_conn_id: str = 'google_cloud_default', + delegate_to: Optional[str] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + project: Optional[str] = None, + **kwargs + ) -> None: # To preserve backward compatibility # TODO: remove one day if project: @@ -439,7 +482,7 @@ def __init__( "the project_id parameter.", DeprecationWarning, stacklevel=2) project_id = project - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.topic = topic self.fail_if_not_exists = fail_if_not_exists @@ -530,19 +573,18 @@ class PubSubDeleteSubscriptionOperator(BaseOperator): @apply_defaults def __init__( - self, - subscription: str, - project_id: Optional[str] = None, - fail_if_not_exists: bool = False, - gcp_conn_id: str = 'google_cloud_default', - delegate_to: Optional[str] = None, - retry: Optional[Retry] = None, - timeout: Optional[float] = None, - metadata: Optional[Sequence[Tuple[str, str]]] = None, - project: Optional[str] = None, - *args, - **kwargs) -> None: - + self, *, + subscription: str, + project_id: Optional[str] = None, + fail_if_not_exists: bool = False, + gcp_conn_id: str = 'google_cloud_default', + delegate_to: Optional[str] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: 
Optional[Sequence[Tuple[str, str]]] = None, + project: Optional[str] = None, + **kwargs + ) -> None: # To preserve backward compatibility # TODO: remove one day if project: @@ -551,7 +593,7 @@ def __init__( "the project_id parameter.", DeprecationWarning, stacklevel=2) project_id = project - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.subscription = subscription self.fail_if_not_exists = fail_if_not_exists @@ -635,16 +677,15 @@ class PubSubPublishMessageOperator(BaseOperator): @apply_defaults def __init__( - self, - topic: str, - messages: List, - project_id: Optional[str] = None, - gcp_conn_id: str = 'google_cloud_default', - delegate_to: Optional[str] = None, - project: Optional[str] = None, - *args, - **kwargs) -> None: - + self, *, + topic: str, + messages: List, + project_id: Optional[str] = None, + gcp_conn_id: str = 'google_cloud_default', + delegate_to: Optional[str] = None, + project: Optional[str] = None, + **kwargs + ) -> None: # To preserve backward compatibility # TODO: remove one day if project: @@ -653,7 +694,7 @@ def __init__( "the project_id parameter.", DeprecationWarning, stacklevel=2) project_id = project - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.topic = topic self.messages = messages @@ -720,18 +761,17 @@ class PubSubPullOperator(BaseOperator): @apply_defaults def __init__( - self, - project_id: str, - subscription: str, - max_messages: int = 5, - ack_messages: bool = False, - messages_callback: Optional[Callable[[List[ReceivedMessage], Dict[str, Any]], Any]] = None, - gcp_conn_id: str = 'google_cloud_default', - delegate_to: Optional[str] = None, - *args, - **kwargs + self, *, + project_id: str, + subscription: str, + max_messages: int = 5, + ack_messages: bool = False, + messages_callback: Optional[Callable[[List[ReceivedMessage], Dict[str, Any]], Any]] = None, + gcp_conn_id: str = 'google_cloud_default', + delegate_to: Optional[str] 
= None, + **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.gcp_conn_id = gcp_conn_id self.delegate_to = delegate_to self.project_id = project_id @@ -767,9 +807,9 @@ def execute(self, context): return ret def _default_message_callback( - self, - pulled_messages: List[ReceivedMessage], - context: Dict[str, Any], # pylint: disable=unused-argument + self, + pulled_messages: List[ReceivedMessage], + context: Dict[str, Any], # pylint: disable=unused-argument ): """ This method can be overridden by subclasses or by `messages_callback` constructor argument. diff --git a/airflow/providers/google/cloud/operators/spanner.py b/airflow/providers/google/cloud/operators/spanner.py index 77d1232f2d937..db47420cdf0f5 100644 --- a/airflow/providers/google/cloud/operators/spanner.py +++ b/airflow/providers/google/cloud/operators/spanner.py @@ -60,14 +60,14 @@ class SpannerDeployInstanceOperator(BaseOperator): # [END gcp_spanner_deploy_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, configuration_name: str, node_count: int, display_name: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.instance_id = instance_id self.project_id = project_id self.configuration_name = configuration_name @@ -75,7 +75,7 @@ def __init__(self, self.display_name = display_name self.gcp_conn_id = gcp_conn_id self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if self.project_id == '': @@ -121,16 +121,16 @@ class SpannerDeleteInstanceOperator(BaseOperator): # [END gcp_spanner_delete_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.instance_id = instance_id self.project_id = project_id 
self.gcp_conn_id = gcp_conn_id self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if self.project_id == '': @@ -177,20 +177,20 @@ class SpannerQueryDatabaseInstanceOperator(BaseOperator): # [END gcp_spanner_query_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, database_id: str, query: Union[str, List[str]], project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.instance_id = instance_id self.project_id = project_id self.database_id = database_id self.query = query self.gcp_conn_id = gcp_conn_id self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if self.project_id == '': @@ -260,20 +260,20 @@ class SpannerDeployDatabaseInstanceOperator(BaseOperator): # [END gcp_spanner_database_deploy_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, database_id: str, ddl_statements: List[str], project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.instance_id = instance_id self.project_id = project_id self.database_id = database_id self.ddl_statements = ddl_statements self.gcp_conn_id = gcp_conn_id self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if self.project_id == '': @@ -334,14 +334,14 @@ class SpannerUpdateDatabaseInstanceOperator(BaseOperator): # [END gcp_spanner_database_update_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, database_id: str, ddl_statements: List[str], project_id: Optional[str] = None, operation_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.instance_id = instance_id self.project_id = project_id 
self.database_id = database_id @@ -349,7 +349,7 @@ def __init__(self, self.operation_id = operation_id self.gcp_conn_id = gcp_conn_id self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if self.project_id == '': @@ -406,18 +406,18 @@ class SpannerDeleteDatabaseInstanceOperator(BaseOperator): # [END gcp_spanner_database_delete_template_fields] @apply_defaults - def __init__(self, + def __init__(self, *, instance_id: str, database_id: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs) -> None: + **kwargs) -> None: self.instance_id = instance_id self.project_id = project_id self.database_id = database_id self.gcp_conn_id = gcp_conn_id self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if self.project_id == '': diff --git a/airflow/providers/google/cloud/operators/speech_to_text.py b/airflow/providers/google/cloud/operators/speech_to_text.py index 7286c7a16f800..0a0813a77c0e8 100644 --- a/airflow/providers/google/cloud/operators/speech_to_text.py +++ b/airflow/providers/google/cloud/operators/speech_to_text.py @@ -64,14 +64,13 @@ class CloudSpeechToTextRecognizeSpeechOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, audio: RecognitionAudio, config: RecognitionConfig, project_id: Optional[str] = None, gcp_conn_id: str = "google_cloud_default", retry: Optional[Retry] = None, timeout: Optional[float] = None, - *args, **kwargs ) -> None: self.audio = audio @@ -81,7 +80,7 @@ def __init__( self.retry = retry self.timeout = timeout self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): if self.audio == "": diff --git a/airflow/providers/google/cloud/operators/stackdriver.py b/airflow/providers/google/cloud/operators/stackdriver.py index 3881acc384f00..9466e9c18e4f4 100644 --- 
a/airflow/providers/google/cloud/operators/stackdriver.py +++ b/airflow/providers/google/cloud/operators/stackdriver.py @@ -82,7 +82,7 @@ class StackdriverListAlertPoliciesOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, + self, *, format_: Optional[str] = None, filter_: Optional[str] = None, order_by: Optional[str] = None, @@ -93,9 +93,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.format_ = format_ self.filter_ = filter_ self.order_by = order_by @@ -163,7 +163,7 @@ class StackdriverEnableAlertPoliciesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, filter_: Optional[str] = None, retry: Optional[str] = DEFAULT, timeout: Optional[float] = DEFAULT, @@ -171,9 +171,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.gcp_conn_id = gcp_conn_id self.project_id = project_id self.delegate_to = delegate_to @@ -235,7 +235,7 @@ class StackdriverDisableAlertPoliciesOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, filter_: Optional[str] = None, retry: Optional[str] = DEFAULT, timeout: Optional[float] = DEFAULT, @@ -243,9 +243,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.gcp_conn_id = gcp_conn_id self.project_id = project_id self.delegate_to = delegate_to @@ -309,7 +309,7 @@ class StackdriverUpsertAlertOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, alerts: str, retry: Optional[str] = 
DEFAULT, timeout: Optional[float] = DEFAULT, @@ -317,9 +317,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.alerts = alerts self.retry = retry self.timeout = timeout @@ -379,7 +379,7 @@ class StackdriverDeleteAlertOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, name: str, retry: Optional[str] = DEFAULT, timeout: Optional[float] = DEFAULT, @@ -387,9 +387,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.name = name self.retry = retry self.timeout = timeout @@ -469,7 +469,7 @@ class StackdriverListNotificationChannelsOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, + self, *, format_: Optional[str] = None, filter_: Optional[str] = None, order_by: Optional[str] = None, @@ -480,9 +480,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.format_ = format_ self.filter_ = filter_ self.order_by = order_by @@ -553,7 +553,7 @@ class StackdriverEnableNotificationChannelsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, filter_: Optional[str] = None, retry: Optional[str] = DEFAULT, timeout: Optional[float] = DEFAULT, @@ -561,9 +561,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.filter_ = filter_ self.retry = retry 
self.timeout = timeout @@ -626,7 +626,7 @@ class StackdriverDisableNotificationChannelsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, filter_: Optional[str] = None, retry: Optional[str] = DEFAULT, timeout: Optional[float] = DEFAULT, @@ -634,9 +634,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.filter_ = filter_ self.retry = retry self.timeout = timeout @@ -701,7 +701,7 @@ class StackdriverUpsertNotificationChannelOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, channels: str, retry: Optional[str] = DEFAULT, timeout: Optional[str] = DEFAULT, @@ -709,9 +709,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.channels = channels self.retry = retry self.timeout = timeout @@ -772,7 +772,7 @@ class StackdriverDeleteNotificationChannelOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, name: str, retry: Optional[str] = DEFAULT, timeout: Optional[float] = DEFAULT, @@ -780,9 +780,9 @@ def __init__( gcp_conn_id: Optional[str] = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs + **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.name = name self.retry = retry self.timeout = timeout diff --git a/airflow/providers/google/cloud/operators/tasks.py b/airflow/providers/google/cloud/operators/tasks.py index 92c2da35f5c03..a7ded9076e095 100644 --- a/airflow/providers/google/cloud/operators/tasks.py +++ b/airflow/providers/google/cloud/operators/tasks.py @@ -76,7 +76,7 @@ class CloudTasksQueueCreateOperator(BaseOperator): @apply_defaults def 
__init__( - self, + self, *, location: str, task_queue: Queue, project_id: Optional[str] = None, @@ -85,10 +85,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.task_queue = task_queue self.project_id = project_id @@ -169,7 +168,7 @@ class CloudTasksQueueUpdateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, task_queue: Queue, project_id: Optional[str] = None, location: Optional[str] = None, @@ -179,10 +178,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.task_queue = task_queue self.project_id = project_id self.location = location @@ -237,7 +235,7 @@ class CloudTasksQueueGetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, project_id: Optional[str] = None, @@ -245,10 +243,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.project_id = project_id @@ -302,7 +299,7 @@ class CloudTasksQueuesListOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, project_id: Optional[str] = None, results_filter: Optional[str] = None, @@ -311,10 +308,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.project_id = project_id self.results_filter = results_filter @@ 
-366,7 +362,7 @@ class CloudTasksQueueDeleteOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, project_id: Optional[str] = None, @@ -374,10 +370,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.project_id = project_id @@ -427,7 +422,7 @@ class CloudTasksQueuePurgeOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, project_id: Optional[str] = None, @@ -435,10 +430,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.project_id = project_id @@ -489,7 +483,7 @@ class CloudTasksQueuePauseOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, project_id: Optional[str] = None, @@ -497,10 +491,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.project_id = project_id @@ -551,7 +544,7 @@ class CloudTasksQueueResumeOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, project_id: Optional[str] = None, @@ -559,10 +552,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = 
queue_name self.project_id = project_id @@ -629,7 +621,7 @@ class CloudTasksTaskCreateOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, location: str, queue_name: str, task: Union[Dict, Task], @@ -640,10 +632,9 @@ def __init__( # pylint: disable=too-many-arguments timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.task = task @@ -711,7 +702,7 @@ class CloudTasksTaskGetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, task_name: str, @@ -721,10 +712,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.task_name = task_name @@ -785,7 +775,7 @@ class CloudTasksTasksListOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, project_id: Optional[str] = None, @@ -795,10 +785,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.project_id = project_id @@ -860,7 +849,7 @@ class CloudTasksTaskDeleteOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, task_name: str, @@ -869,10 +858,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + 
super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.task_name = task_name @@ -935,7 +923,7 @@ class CloudTasksTaskRunOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, queue_name: str, task_name: str, @@ -945,10 +933,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.queue_name = queue_name self.task_name = task_name diff --git a/airflow/providers/google/cloud/operators/text_to_speech.py b/airflow/providers/google/cloud/operators/text_to_speech.py index d652d3dd1ab79..fc7f0f2ee4e69 100644 --- a/airflow/providers/google/cloud/operators/text_to_speech.py +++ b/airflow/providers/google/cloud/operators/text_to_speech.py @@ -81,7 +81,7 @@ class CloudTextToSpeechSynthesizeOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, input_data: Union[Dict, SynthesisInput], voice: Union[Dict, VoiceSelectionParams], audio_config: Union[Dict, AudioConfig], @@ -91,7 +91,6 @@ def __init__( gcp_conn_id: str = "google_cloud_default", retry: Optional[Retry] = None, timeout: Optional[float] = None, - *args, **kwargs ) -> None: self.input_data = input_data @@ -104,7 +103,7 @@ def __init__( self.retry = retry self.timeout = timeout self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _validate_inputs(self): for parameter in [ diff --git a/airflow/providers/google/cloud/operators/translate.py b/airflow/providers/google/cloud/operators/translate.py index 3d2e2d3ce9d47..a165806a558c5 100644 --- a/airflow/providers/google/cloud/operators/translate.py +++ b/airflow/providers/google/cloud/operators/translate.py @@ -79,17 +79,16 @@ class CloudTranslateTextOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, values: Union[List[str], str], 
target_language: str, format_: str, source_language: Optional[str], model: str, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.values = values self.target_language = target_language self.format_ = format_ diff --git a/airflow/providers/google/cloud/operators/translate_speech.py b/airflow/providers/google/cloud/operators/translate_speech.py index 6c5781d0f09ce..6b610b5fb6d3e 100644 --- a/airflow/providers/google/cloud/operators/translate_speech.py +++ b/airflow/providers/google/cloud/operators/translate_speech.py @@ -103,7 +103,7 @@ class CloudTranslateSpeechOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, audio: RecognitionAudio, config: RecognitionConfig, target_language: str, @@ -112,10 +112,9 @@ def __init__( model: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.audio = audio self.config = config self.target_language = target_language diff --git a/airflow/providers/google/cloud/operators/video_intelligence.py b/airflow/providers/google/cloud/operators/video_intelligence.py index c181f257766ba..5a3911231aa0f 100644 --- a/airflow/providers/google/cloud/operators/video_intelligence.py +++ b/airflow/providers/google/cloud/operators/video_intelligence.py @@ -27,6 +27,7 @@ from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.video_intelligence import CloudVideoIntelligenceHook +from airflow.utils.decorators import apply_defaults class CloudVideoIntelligenceDetectVideoLabelsOperator(BaseOperator): @@ -68,8 +69,9 @@ class CloudVideoIntelligenceDetectVideoLabelsOperator(BaseOperator): template_fields = ("input_uri", "output_uri", "gcp_conn_id") # [END gcp_video_intelligence_detect_labels_template_fields] + @apply_defaults def __init__( - self, + self, *, input_uri: str, input_content: 
Optional[bytes] = None, output_uri: Optional[str] = None, @@ -78,10 +80,9 @@ def __init__( retry: Optional[Retry] = None, timeout: Optional[float] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.input_uri = input_uri self.input_content = input_content self.output_uri = output_uri @@ -147,8 +148,9 @@ class CloudVideoIntelligenceDetectVideoExplicitContentOperator(BaseOperator): template_fields = ("input_uri", "output_uri", "gcp_conn_id") # [END gcp_video_intelligence_detect_explicit_content_template_fields] + @apply_defaults def __init__( - self, + self, *, input_uri: str, output_uri: Optional[str] = None, input_content: Optional[bytes] = None, @@ -157,10 +159,9 @@ def __init__( retry: Optional[Retry] = None, timeout: Optional[float] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.input_uri = input_uri self.output_uri = output_uri self.input_content = input_content @@ -226,8 +227,9 @@ class CloudVideoIntelligenceDetectVideoShotsOperator(BaseOperator): template_fields = ("input_uri", "output_uri", "gcp_conn_id") # [END gcp_video_intelligence_detect_video_shots_template_fields] + @apply_defaults def __init__( - self, + self, *, input_uri: str, output_uri: Optional[str] = None, input_content: Optional[bytes] = None, @@ -236,10 +238,9 @@ def __init__( retry: Optional[Retry] = None, timeout: Optional[float] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.input_uri = input_uri self.output_uri = output_uri self.input_content = input_content diff --git a/airflow/providers/google/cloud/operators/vision.py b/airflow/providers/google/cloud/operators/vision.py index 514db58a4634d..fd07d2af40600 100644 --- a/airflow/providers/google/cloud/operators/vision.py +++ 
b/airflow/providers/google/cloud/operators/vision.py @@ -75,7 +75,7 @@ class CloudVisionCreateProductSetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, product_set: Union[dict, ProductSet], location: str, project_id: Optional[str] = None, @@ -84,10 +84,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.project_id = project_id self.product_set = product_set @@ -151,7 +150,7 @@ class CloudVisionGetProductSetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, product_set_id: str, project_id: Optional[str] = None, @@ -159,10 +158,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.project_id = project_id self.product_set_id = product_set_id @@ -236,7 +234,7 @@ class CloudVisionUpdateProductSetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, product_set: Union[Dict, ProductSet], location: Optional[str] = None, product_set_id: Optional[str] = None, @@ -246,10 +244,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.product_set = product_set self.update_mask = update_mask self.location = location @@ -310,7 +307,7 @@ class CloudVisionDeleteProductSetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, product_set_id: str, project_id: Optional[str] = None, @@ -318,10 +315,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = 
'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.project_id = project_id self.product_set_id = product_set_id @@ -388,7 +384,7 @@ class CloudVisionCreateProductOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, product: str, project_id: Optional[str] = None, @@ -397,10 +393,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.product = product self.project_id = project_id @@ -467,7 +462,7 @@ class CloudVisionGetProductOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, product_id: str, project_id: Optional[str] = None, @@ -475,10 +470,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.product_id = product_id self.project_id = project_id @@ -563,7 +557,7 @@ class CloudVisionUpdateProductOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, product: Union[Dict, Product], location: Optional[str] = None, product_id: Optional[str] = None, @@ -573,10 +567,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.product = product self.location = location self.product_id = product_id @@ -642,7 +635,7 @@ class CloudVisionDeleteProductOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, product_id: str, project_id: Optional[str] = None, @@ -650,10 +643,9 @@ def __init__( timeout: 
Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.product_id = product_id self.project_id = project_id @@ -703,15 +695,14 @@ class CloudVisionImageAnnotateOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, request: Union[Dict, AnnotateImageRequest], retry: Optional[Retry] = None, timeout: Optional[float] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.request = request self.retry = retry self.timeout = timeout @@ -783,7 +774,7 @@ class CloudVisionCreateReferenceImageOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, reference_image: Union[Dict, ReferenceImage], product_id: str, @@ -793,10 +784,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.product_id = product_id self.reference_image = reference_image @@ -873,7 +863,7 @@ class CloudVisionDeleteReferenceImageOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, location: str, product_id: str, reference_image_id: str, @@ -882,10 +872,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.location = location self.product_id = product_id self.reference_image_id = reference_image_id @@ -950,7 +939,7 @@ class CloudVisionAddProductToProductSetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, product_set_id: str, product_id: str, location: str, @@ -959,10 +948,9 @@ def 
__init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.product_set_id = product_set_id self.product_id = product_id self.location = location @@ -1021,7 +1009,7 @@ class CloudVisionRemoveProductFromProductSetOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, product_set_id: str, product_id: str, location: str, @@ -1030,10 +1018,9 @@ def __init__( timeout: Optional[float] = None, metadata: Optional[MetaData] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.product_set_id = product_set_id self.product_id = product_id self.location = location @@ -1098,10 +1085,9 @@ def __init__( web_detection_params: Optional[Dict] = None, additional_properties: Optional[Dict] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.image = image self.max_results = max_results self.retry = retry @@ -1167,10 +1153,9 @@ def __init__( web_detection_params: Optional[Dict] = None, additional_properties: Optional[Dict] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.image = image self.max_results = max_results self.retry = retry @@ -1227,10 +1212,9 @@ def __init__( timeout: Optional[float] = None, additional_properties: Optional[Dict] = None, gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.image = image self.max_results = max_results self.retry = retry @@ -1283,10 +1267,9 @@ def __init__( timeout: Optional[float] = None, additional_properties: Optional[Dict] = None, gcp_conn_id: str = "google_cloud_default", - *args, 
**kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.image = image self.max_results = max_results self.retry = retry diff --git a/airflow/providers/google/cloud/secrets/secret_manager.py b/airflow/providers/google/cloud/secrets/secret_manager.py index 2f4d293e8ee62..88f1421ed8f45 100644 --- a/airflow/providers/google/cloud/secrets/secret_manager.py +++ b/airflow/providers/google/cloud/secrets/secret_manager.py @@ -64,9 +64,10 @@ class CloudSecretManagerBackend(BaseSecretsBackend, LoggingMixin): :type gcp_keyfile_dict: dict :param gcp_scopes: Comma-separated string containing GCP scopes :type gcp_scopes: str - :param project_id: Project id (if you want to override the project_id from credentials) + :param project_id: Project ID to read the secrets from. If not passed, the project ID from credentials + will be used. :type project_id: str - :param sep: separator used to concatenate connections_prefix and conn_id. Default: "-" + :param sep: Separator used to concatenate connections_prefix and conn_id. 
Default: "-" :type sep: str """ def __init__( diff --git a/airflow/providers/google/cloud/sensors/bigquery.py b/airflow/providers/google/cloud/sensors/bigquery.py index c1f722a7bb7cf..463305b19f963 100644 --- a/airflow/providers/google/cloud/sensors/bigquery.py +++ b/airflow/providers/google/cloud/sensors/bigquery.py @@ -50,15 +50,15 @@ class BigQueryTableExistenceSensor(BaseSensorOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, + def __init__(self, *, project_id: str, dataset_id: str, table_id: str, bigquery_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: + **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.project_id = project_id self.dataset_id = dataset_id self.table_id = table_id diff --git a/airflow/providers/google/cloud/sensors/bigquery_dts.py b/airflow/providers/google/cloud/sensors/bigquery_dts.py index 06e3e598bfe9b..018d037d3b0eb 100644 --- a/airflow/providers/google/cloud/sensors/bigquery_dts.py +++ b/airflow/providers/google/cloud/sensors/bigquery_dts.py @@ -69,6 +69,7 @@ class BigQueryDataTransferServiceTransferRunSensor(BaseSensorOperator): @apply_defaults def __init__( self, + *, run_id: str, transfer_config_id: str, expected_statuses: Union[Set[str], str] = 'SUCCEEDED', @@ -77,10 +78,9 @@ def __init__( retry: Optional[Retry] = None, request_timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.run_id = run_id self.transfer_config_id = transfer_config_id self.retry = retry diff --git a/airflow/providers/google/cloud/sensors/bigtable.py b/airflow/providers/google/cloud/sensors/bigtable.py index 11bc48ed839f1..e3f3263f2c791 100644 --- a/airflow/providers/google/cloud/sensors/bigtable.py +++ b/airflow/providers/google/cloud/sensors/bigtable.py @@ -55,11 +55,11 @@ class 
BigtableTableReplicationCompletedSensor(BaseSensorOperator, BigtableValida @apply_defaults def __init__( self, + *, instance_id: str, table_id: str, project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: self.project_id = project_id @@ -67,7 +67,7 @@ def __init__( self.table_id = table_id self.gcp_conn_id = gcp_conn_id self._validate_inputs() - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def poke(self, context): hook = BigtableHook(gcp_conn_id=self.gcp_conn_id) diff --git a/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py index d2153b0eb7c57..a933e273079e9 100644 --- a/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py @@ -55,15 +55,14 @@ class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, job_name: str, expected_statuses: Union[Set[str], str], project_id: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_name = job_name self.expected_statuses = ( {expected_statuses} if isinstance(expected_statuses, str) else expected_statuses diff --git a/airflow/providers/google/cloud/sensors/gcs.py b/airflow/providers/google/cloud/sensors/gcs.py index a32c9cbab4b13..dfe41a09c77f6 100644 --- a/airflow/providers/google/cloud/sensors/gcs.py +++ b/airflow/providers/google/cloud/sensors/gcs.py @@ -50,14 +50,14 @@ class GCSObjectExistenceSensor(BaseSensorOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, + def __init__(self, *, bucket: str, object: str, # pylint: disable=redefined-builtin google_cloud_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: + **kwargs) -> None: - 
super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket = bucket self.object = object self.google_cloud_conn_id = google_cloud_conn_id @@ -111,9 +111,9 @@ def __init__(self, ts_func: Callable = ts_function, google_cloud_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: + **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket = bucket self.object = object self.ts_func = ts_func @@ -158,8 +158,8 @@ def __init__(self, prefix: str, google_cloud_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.bucket = bucket self.prefix = prefix self.google_cloud_conn_id = google_cloud_conn_id @@ -239,9 +239,9 @@ def __init__(self, allow_delete: bool = True, google_cloud_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, - *args, **kwargs) -> None: + **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket = bucket self.prefix = prefix @@ -259,7 +259,10 @@ def __init__(self, def _get_gcs_hook(self): if not self.hook: - self.hook = GCSHook() + self.hook = GCSHook( + gcp_conn_id=self.google_cloud_conn_id, + delegate_to=self.delegate_to, + ) return self.hook def is_bucket_updated(self, current_objects: Set[str]) -> bool: diff --git a/airflow/providers/google/cloud/sensors/pubsub.py b/airflow/providers/google/cloud/sensors/pubsub.py index d30320a3efa11..f1c6af88605fa 100644 --- a/airflow/providers/google/cloud/sensors/pubsub.py +++ b/airflow/providers/google/cloud/sensors/pubsub.py @@ -93,7 +93,7 @@ class PubSubPullSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, project_id: str, subscription: str, max_messages: int = 5, @@ -103,7 +103,6 @@ def __init__( messages_callback: Optional[Callable[[List[ReceivedMessage], Dict[str, Any]], Any]] = None, 
delegate_to: Optional[str] = None, project: Optional[str] = None, - *args, **kwargs ) -> None: # To preserve backward compatibility @@ -126,7 +125,7 @@ def __init__( stacklevel=2 ) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.gcp_conn_id = gcp_conn_id self.delegate_to = delegate_to self.project_id = project_id diff --git a/airflow/providers/google/cloud/transfers/adls_to_gcs.py b/airflow/providers/google/cloud/transfers/adls_to_gcs.py index 8afe9d95fc515..a670537e99412 100644 --- a/airflow/providers/google/cloud/transfers/adls_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/adls_to_gcs.py @@ -98,7 +98,7 @@ class ADLSToGCSOperator(AzureDataLakeStorageListOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, + def __init__(self, *, src_adls: str, dest_gcs: str, azure_data_lake_conn_id: str, diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py b/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py index f164cf2ce20a0..a9b7e7168a52d 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py @@ -75,7 +75,7 @@ class BigQueryToBigQueryOperator(BaseOperator): ui_color = '#e6f0e4' @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments + def __init__(self, *, # pylint: disable=too-many-arguments source_project_dataset_tables: Union[List[str], str], destination_project_dataset_table: str, write_disposition: str = 'WRITE_EMPTY', @@ -86,9 +86,8 @@ def __init__(self, # pylint: disable=too-many-arguments labels: Optional[Dict] = None, encryption_configuration: Optional[Dict] = None, location: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if bigquery_conn_id: warnings.warn( diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py b/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py index 
2c7002e1e2d25..76e675363d00a 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py @@ -73,7 +73,7 @@ class BigQueryToGCSOperator(BaseOperator): ui_color = '#e4e6f0' @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments + def __init__(self, *, # pylint: disable=too-many-arguments source_project_dataset_table: str, destination_cloud_storage_uris: List[str], compression: str = 'NONE', @@ -85,9 +85,8 @@ def __init__(self, # pylint: disable=too-many-arguments delegate_to: Optional[str] = None, labels: Optional[Dict] = None, location: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if bigquery_conn_id: warnings.warn( diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py index ab0174d531524..0bd73021cf498 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py @@ -78,7 +78,7 @@ class BigQueryToMySqlOperator(BaseOperator): template_fields = ('dataset_id', 'table_id', 'mysql_table') @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments + def __init__(self, *, # pylint: disable=too-many-arguments dataset_table: str, mysql_table: str, selected_fields: Optional[str] = None, @@ -89,9 +89,8 @@ def __init__(self, # pylint: disable=too-many-arguments replace: bool = False, batch_size: int = 1000, location: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.selected_fields = selected_fields self.gcp_conn_id = gcp_conn_id self.mysql_conn_id = mysql_conn_id diff --git a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py index 16f7e2baa8e25..dfc1b259d519f 100644 --- 
a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py @@ -82,7 +82,7 @@ class CassandraToGCSOperator(BaseOperator): ui_color = '#a0e08c' @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments + def __init__(self, *, # pylint: disable=too-many-arguments cql: str, bucket: str, filename: str, @@ -93,9 +93,8 @@ def __init__(self, # pylint: disable=too-many-arguments gcp_conn_id: str = 'google_cloud_default', google_cloud_storage_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if google_cloud_storage_conn_id: warnings.warn( diff --git a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py index 0b9140ec2f36c..fe7083fad33ce 100644 --- a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py @@ -72,7 +72,7 @@ class FacebookAdsReportToGcsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, bucket_name: str, object_name: str, fields: List[str], @@ -81,10 +81,9 @@ def __init__( api_version: str = "v6.0", gcp_conn_id: str = "google_cloud_default", facebook_conn_id: str = "facebook_default", - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket_name = bucket_name self.object_name = object_name self.gcp_conn_id = gcp_conn_id diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py index 241481e8dc1b4..c44103dd4d329 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py @@ -157,7 +157,7 @@ class GCSToBigQueryOperator(BaseOperator): # pylint: disable=too-many-locals,too-many-arguments 
@apply_defaults - def __init__(self, + def __init__(self, *, bucket, source_objects, destination_project_dataset_table, @@ -187,9 +187,9 @@ def __init__(self, autodetect=True, encryption_configuration=None, location=None, - *args, **kwargs): + **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) # GCS config if src_fmt_configs is None: diff --git a/airflow/providers/google/cloud/transfers/gcs_to_gcs.py b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py index 70b6288e7241f..d1afd19505ad1 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py @@ -170,7 +170,7 @@ class GCSToGCSOperator(BaseOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments + def __init__(self, *, # pylint: disable=too-many-arguments source_bucket, source_object=None, source_objects=None, @@ -185,9 +185,8 @@ def __init__(self, # pylint: disable=too-many-arguments last_modified_time=None, maximum_modified_time=None, is_older_than=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if google_cloud_storage_conn_id: warnings.warn( "The google_cloud_storage_conn_id parameter has been deprecated. 
You should pass " diff --git a/airflow/providers/google/cloud/transfers/gcs_to_local.py b/airflow/providers/google/cloud/transfers/gcs_to_local.py index 9c564bda4f0f5..6fab34bede76f 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_local.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_local.py @@ -67,7 +67,7 @@ class GCSToLocalFilesystemOperator(BaseOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, + def __init__(self, *, bucket: str, object_name: Optional[str] = None, filename: Optional[str] = None, @@ -75,7 +75,6 @@ def __init__(self, gcp_conn_id: str = 'google_cloud_default', google_cloud_storage_conn_id: Optional[str] = None, delegate_to: Optional[str] = None, - *args, **kwargs) -> None: # To preserve backward compatibility # TODO: Remove one day @@ -95,7 +94,7 @@ def __init__(self, "the gcp_conn_id parameter.", DeprecationWarning, stacklevel=3) gcp_conn_id = google_cloud_storage_conn_id - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket = bucket self.object = object_name self.filename = filename # noqa diff --git a/airflow/providers/google/cloud/transfers/gcs_to_sftp.py b/airflow/providers/google/cloud/transfers/gcs_to_sftp.py index dfa83528d97fa..99d21cb0365fa 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_sftp.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_sftp.py @@ -73,7 +73,7 @@ class GCSToSFTPOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, + self, *, source_bucket: str, source_object: str, destination_path: str, @@ -81,10 +81,9 @@ def __init__( gcp_conn_id: str = "google_cloud_default", sftp_conn_id: str = "ssh_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.source_bucket = source_bucket self.source_object = source_object diff --git a/airflow/providers/google/cloud/transfers/local_to_gcs.py 
b/airflow/providers/google/cloud/transfers/local_to_gcs.py index 1995261f9291e..bbf8de229f8ef 100644 --- a/airflow/providers/google/cloud/transfers/local_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/local_to_gcs.py @@ -60,7 +60,7 @@ class LocalFilesystemToGCSOperator(BaseOperator): template_fields = ('src', 'dst', 'bucket') @apply_defaults - def __init__(self, + def __init__(self, *, src, dst, bucket, @@ -69,9 +69,8 @@ def __init__(self, mime_type='application/octet-stream', delegate_to=None, gzip=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if google_cloud_storage_conn_id: warnings.warn( diff --git a/airflow/providers/google/cloud/transfers/mssql_to_gcs.py b/airflow/providers/google/cloud/transfers/mssql_to_gcs.py index d6d1c806991ca..face7667e05ac 100644 --- a/airflow/providers/google/cloud/transfers/mssql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/mssql_to_gcs.py @@ -58,11 +58,10 @@ class MSSQLToGCSOperator(BaseSQLToGCSOperator): } @apply_defaults - def __init__(self, + def __init__(self, *, mssql_conn_id='mssql_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.mssql_conn_id = mssql_conn_id def query(self): diff --git a/airflow/providers/google/cloud/transfers/mysql_to_gcs.py b/airflow/providers/google/cloud/transfers/mysql_to_gcs.py index 8ab753959b685..5d98a2d175e09 100644 --- a/airflow/providers/google/cloud/transfers/mysql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/mysql_to_gcs.py @@ -62,12 +62,11 @@ class MySQLToGCSOperator(BaseSQLToGCSOperator): } @apply_defaults - def __init__(self, + def __init__(self, *, mysql_conn_id='mysql_default', ensure_utc=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.mysql_conn_id = mysql_conn_id self.ensure_utc = ensure_utc diff --git a/airflow/providers/google/cloud/transfers/postgres_to_gcs.py 
b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py index 1f0aabf97abe9..816ad277552fb 100644 --- a/airflow/providers/google/cloud/transfers/postgres_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py @@ -58,11 +58,10 @@ class PostgresToGCSOperator(BaseSQLToGCSOperator): } @apply_defaults - def __init__(self, + def __init__(self, *, postgres_conn_id='postgres_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.postgres_conn_id = postgres_conn_id def query(self): diff --git a/airflow/providers/google/cloud/transfers/presto_to_gcs.py b/airflow/providers/google/cloud/transfers/presto_to_gcs.py index 855b99a84511c..f94024fdbebdd 100644 --- a/airflow/providers/google/cloud/transfers/presto_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/presto_to_gcs.py @@ -180,12 +180,11 @@ class PrestoToGCSOperator(BaseSQLToGCSOperator): @apply_defaults def __init__( - self, + self, *, presto_conn_id: str = "presto_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.presto_conn_id = presto_conn_id def query(self): diff --git a/airflow/providers/google/cloud/transfers/s3_to_gcs.py b/airflow/providers/google/cloud/transfers/s3_to_gcs.py index 009f4e6e6830e..c1390dc9f2146 100644 --- a/airflow/providers/google/cloud/transfers/s3_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/s3_to_gcs.py @@ -93,7 +93,7 @@ class S3ToGCSOperator(S3ListOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, bucket, prefix='', delimiter='', @@ -105,7 +105,6 @@ def __init__(self, delegate_to=None, replace=False, gzip=False, - *args, **kwargs): super().__init__( @@ -113,7 +112,6 @@ def __init__(self, prefix=prefix, delimiter=delimiter, aws_conn_id=aws_conn_id, - *args, **kwargs) if dest_gcs_conn_id: diff --git a/airflow/providers/google/cloud/transfers/sftp_to_gcs.py 
b/airflow/providers/google/cloud/transfers/sftp_to_gcs.py index 38ef170eb6279..cb9764b683b44 100644 --- a/airflow/providers/google/cloud/transfers/sftp_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/sftp_to_gcs.py @@ -74,7 +74,7 @@ class SFTPToGCSOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, source_path: str, destination_bucket: str, destination_path: Optional[str] = None, @@ -84,10 +84,9 @@ def __init__( mime_type: str = "application/octet-stream", gzip: bool = False, move_object: bool = False, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.source_path = source_path self.destination_path = self._set_destination_path(destination_path) diff --git a/airflow/providers/google/cloud/transfers/sheets_to_gcs.py b/airflow/providers/google/cloud/transfers/sheets_to_gcs.py index 7104f584a7ce3..c2bbe25d154ff 100644 --- a/airflow/providers/google/cloud/transfers/sheets_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/sheets_to_gcs.py @@ -22,6 +22,7 @@ from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.gcs import GCSHook from airflow.providers.google.suite.hooks.sheets import GSheetsHook +from airflow.utils.decorators import apply_defaults class GoogleSheetsToGCSOperator(BaseOperator): @@ -51,18 +52,18 @@ class GoogleSheetsToGCSOperator(BaseOperator): template_fields = ["spreadsheet_id", "destination_bucket", "destination_path", "sheet_filter"] + @apply_defaults def __init__( - self, + self, *, spreadsheet_id: str, destination_bucket: str, sheet_filter: Optional[List[str]] = None, destination_path: Optional[str] = None, gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.gcp_conn_id = gcp_conn_id self.spreadsheet_id = spreadsheet_id self.sheet_filter = sheet_filter diff --git 
a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py index 216c1fa901229..1473c730e8973 100644 --- a/airflow/providers/google/cloud/transfers/sql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/sql_to_gcs.py @@ -79,7 +79,7 @@ class BaseSQLToGCSOperator(BaseOperator): ui_color = '#a0e08c' @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments + def __init__(self, *, # pylint: disable=too-many-arguments sql, bucket, filename, @@ -93,9 +93,8 @@ def __init__(self, # pylint: disable=too-many-arguments gcp_conn_id='google_cloud_default', google_cloud_storage_conn_id=None, delegate_to=None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if google_cloud_storage_conn_id: warnings.warn( diff --git a/airflow/providers/google/cloud/utils/field_sanitizer.py b/airflow/providers/google/cloud/utils/field_sanitizer.py index 0236fda5b78f1..f9ebc976b960c 100644 --- a/airflow/providers/google/cloud/utils/field_sanitizer.py +++ b/airflow/providers/google/cloud/utils/field_sanitizer.py @@ -68,8 +68,8 @@ >>> } >>> } >>> sanitizer=GcpBodyFieldSanitizer(FIELDS_TO_SANITIZE) ->>> SANITIZED_BODY = sanitizer.sanitize(body) ->>> json.dumps(SANITIZED_BODY, indent=2) +>>> sanitizer.sanitize(body) +>>> json.dumps(body, indent=2) { "name": "instance", "properties": { diff --git a/airflow/providers/google/firebase/operators/firestore.py b/airflow/providers/google/firebase/operators/firestore.py index 0795cd9142acc..f187a56dae8be 100644 --- a/airflow/providers/google/firebase/operators/firestore.py +++ b/airflow/providers/google/firebase/operators/firestore.py @@ -52,15 +52,15 @@ class CloudFirestoreExportDatabaseOperator(BaseOperator): @apply_defaults def __init__( self, + *, body: Dict, database_id: str = "(default)", project_id: Optional[str] = None, gcp_conn_id: str = "google_cloud_default", api_version: str = "v1", - *args, **kwargs ) -> None: - super().__init__(*args, 
**kwargs) + super().__init__(**kwargs) self.database_id = database_id self.body = body self.project_id = project_id diff --git a/airflow/providers/google/marketing_platform/example_dags/example_analytics.py b/airflow/providers/google/marketing_platform/example_dags/example_analytics.py index da60947bd2605..79578c4dc78c5 100644 --- a/airflow/providers/google/marketing_platform/example_dags/example_analytics.py +++ b/airflow/providers/google/marketing_platform/example_dags/example_analytics.py @@ -37,12 +37,10 @@ ) DATA_ID = "kjdDu3_tQa6n8Q1kXFtSmg" -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "example_google_analytics", - default_args=default_args, - schedule_interval=None, # Override to match your needs + schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1), ) as dag: # [START howto_marketing_platform_list_accounts_operator] list_account = GoogleAnalyticsListAccountsOperator(task_id="list_account") diff --git a/airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py b/airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py index 8366fc570ef67..0fba00f29ba7c 100644 --- a/airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py +++ b/airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py @@ -83,12 +83,10 @@ "value": 123.4, } -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "example_campaign_manager", - default_args=default_args, - schedule_interval=None, # Override to match your needs + schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1) ) as dag: # [START howto_campaign_manager_insert_report_operator] create_report = GoogleCampaignManagerInsertReportOperator( diff --git a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py b/airflow/providers/google/marketing_platform/example_dags/example_display_video.py index
df4e538eff168..daa008a5e2108 100644 --- a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py +++ b/airflow/providers/google/marketing_platform/example_dags/example_display_video.py @@ -80,13 +80,11 @@ "fileSpec": "EWF"} # [END howto_display_video_env_variables] -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "example_display_video", - default_args=default_args, - schedule_interval=None, # Override to match your needs -) as dag: + schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1) +) as dag1: # [START howto_google_display_video_createquery_report_operator] create_report = GoogleDisplayVideo360CreateReportOperator( body=REPORT, task_id="create_report" ) @@ -121,6 +119,13 @@ ) # [END howto_google_display_video_deletequery_report_operator] + create_report >> run_report >> wait_for_report >> get_report >> delete_report + +with models.DAG( + "example_display_video_misc", + schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1) +) as dag2: # [START howto_google_display_video_upload_multiple_entity_read_files_to_big_query] upload_erf_to_bq = GCSToBigQueryOperator( task_id='upload_erf_to_bq', @@ -128,7 +133,7 @@ source_objects=ERF_SOURCE_OBJECT, destination_project_dataset_table=f"{BQ_DATA_SET}.gcs_to_bq_table", write_disposition='WRITE_TRUNCATE', - dag=dag) + ) # [END howto_google_display_video_upload_multiple_entity_read_files_to_big_query] # [START howto_google_display_video_download_line_items_operator] @@ -149,6 +154,11 @@ ) # [END howto_google_display_video_upload_line_items_operator] +with models.DAG( + "example_display_video_sdf", + schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1) +) as dag3: # [START howto_google_display_video_create_sdf_download_task_operator] create_sdf_download_task = GoogleDisplayVideo360CreateSDFDownloadTaskOperator( task_id="create_sdf_download_task",
body_request=CREATE_SDF_DOWNLOAD_TASK_BODY_REQUEST @@ -183,9 +193,7 @@ {"name": "post_abbr", "type": "STRING", "mode": "NULLABLE"}, ], write_disposition="WRITE_TRUNCATE", - dag=dag, ) # [END howto_google_display_video_gcs_to_big_query_operator] - create_report >> run_report >> wait_for_report >> get_report >> delete_report create_sdf_download_task >> wait_for_operation >> save_sdf_in_gcs >> upload_sdf_to_big_query diff --git a/airflow/providers/google/marketing_platform/example_dags/example_search_ads.py b/airflow/providers/google/marketing_platform/example_dags/example_search_ads.py index 3a62957b7f470..4cc9328b50f8d 100644 --- a/airflow/providers/google/marketing_platform/example_dags/example_search_ads.py +++ b/airflow/providers/google/marketing_platform/example_dags/example_search_ads.py @@ -43,12 +43,10 @@ } # [END howto_search_ads_env_variables] -default_args = {"start_date": dates.days_ago(1)} - with models.DAG( "example_search_ads", - default_args=default_args, - schedule_interval=None, # Override to match your needs + schedule_interval=None, # Override to match your needs + start_date=dates.days_ago(1) ) as dag: # [START howto_search_ads_generate_report_operator] generate_report = GoogleSearchAdsInsertReportOperator( diff --git a/airflow/providers/google/marketing_platform/hooks/analytics.py b/airflow/providers/google/marketing_platform/hooks/analytics.py index d4d5228c0a708..546ad29b709c1 100644 --- a/airflow/providers/google/marketing_platform/hooks/analytics.py +++ b/airflow/providers/google/marketing_platform/hooks/analytics.py @@ -31,13 +31,11 @@ class GoogleAnalyticsHook(GoogleBaseHook): def __init__( self, api_version: str = "v3", - gcp_conn_id: str = "google_cloud_default", *args, **kwargs ): super().__init__(*args, **kwargs) self.api_version = api_version - self.gcp_connection_is = gcp_conn_id self._conn = None def _paginate(self, resource: Resource, list_args: Optional[Dict[str, Any]] = None): diff --git
a/airflow/providers/google/marketing_platform/operators/analytics.py b/airflow/providers/google/marketing_platform/operators/analytics.py index 9564596b74852..8d019d23fb30d 100644 --- a/airflow/providers/google/marketing_platform/operators/analytics.py +++ b/airflow/providers/google/marketing_platform/operators/analytics.py @@ -55,13 +55,12 @@ class GoogleAnalyticsListAccountsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, api_version: str = "v3", gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.api_version = api_version self.gcp_conn_id = gcp_conn_id @@ -104,16 +103,15 @@ class GoogleAnalyticsGetAdsLinkOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, account_id: str, web_property_ad_words_link_id: str, web_property_id: str, api_version: str = "v3", gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.account_id = account_id self.web_property_ad_words_link_id = web_property_ad_words_link_id @@ -160,15 +158,14 @@ class GoogleAnalyticsRetrieveAdsLinksListOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, account_id: str, web_property_id: str, api_version: str = "v3", gcp_conn_id: str = "google_cloud_default", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.account_id = account_id self.web_property_id = web_property_id @@ -216,7 +213,7 @@ class GoogleAnalyticsDataImportUploadOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, storage_bucket: str, storage_name_object: str, account_id: str, @@ -226,10 +223,9 @@ def __init__( gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, api_version: str = "v3", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.storage_bucket = storage_bucket self.storage_name_object = 
storage_name_object self.account_id = account_id @@ -298,10 +294,9 @@ def __init__( gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, api_version: str = "v3", - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.account_id = account_id self.web_property_id = web_property_id @@ -364,11 +359,10 @@ def __init__( gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, custom_dimension_header_mapping: Optional[Dict[str, str]] = None, - *args, **kwargs ): super(GoogleAnalyticsModifyFileHeadersDataImportOperator, self).__init__( - *args, **kwargs + **kwargs ) self.storage_bucket = storage_bucket self.storage_name_object = storage_name_object diff --git a/airflow/providers/google/marketing_platform/operators/campaign_manager.py b/airflow/providers/google/marketing_platform/operators/campaign_manager.py index 1c8760ea7f7f6..ba2fedd615dd1 100644 --- a/airflow/providers/google/marketing_platform/operators/campaign_manager.py +++ b/airflow/providers/google/marketing_platform/operators/campaign_manager.py @@ -70,17 +70,16 @@ class GoogleCampaignManagerDeleteReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, profile_id: str, report_name: Optional[str] = None, report_id: Optional[str] = None, api_version: str = "v3.3", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if not (report_name or report_id): raise AirflowException("Please provide `report_name` or `report_id`.") if report_name and report_id: @@ -164,7 +163,7 @@ class GoogleCampaignManagerDownloadReportOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, profile_id: str, report_id: str, file_id: str, @@ -175,10 +174,9 @@ def __init__( # pylint: disable=too-many-arguments api_version: str = "v3.3", gcp_conn_id: str = "google_cloud_default", 
delegate_to: Optional[str] = None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.profile_id = profile_id self.report_id = report_id self.file_id = file_id @@ -283,16 +281,15 @@ class GoogleCampaignManagerInsertReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, profile_id: str, report: Dict[str, Any], api_version: str = "v3.3", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.profile_id = profile_id self.report = report self.api_version = api_version @@ -359,17 +356,16 @@ class GoogleCampaignManagerRunReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, profile_id: str, report_id: str, synchronous: bool = False, api_version: str = "v3.3", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.profile_id = profile_id self.report_id = report_id self.synchronous = synchronous @@ -442,7 +438,7 @@ class GoogleCampaignManagerBatchInsertConversionsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, profile_id: str, conversions: List[Dict[str, Any]], encryption_entity_type: str, @@ -452,10 +448,9 @@ def __init__( api_version: str = "v3.3", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.profile_id = profile_id self.conversions = conversions self.encryption_entity_type = encryption_entity_type @@ -530,7 +525,7 @@ class GoogleCampaignManagerBatchUpdateConversionsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, profile_id: str, conversions: List[Dict[str, Any]], encryption_entity_type: str, @@ -540,10 +535,9 @@ def __init__( api_version: str = "v3.3", gcp_conn_id: str = 
"google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.profile_id = profile_id self.conversions = conversions self.encryption_entity_type = encryption_entity_type diff --git a/airflow/providers/google/marketing_platform/operators/display_video.py b/airflow/providers/google/marketing_platform/operators/display_video.py index 8f5f5b4a8597f..4e130b3675442 100644 --- a/airflow/providers/google/marketing_platform/operators/display_video.py +++ b/airflow/providers/google/marketing_platform/operators/display_video.py @@ -62,15 +62,14 @@ class GoogleDisplayVideo360CreateReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, body: Dict[str, Any], api_version: str = "v1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.body = body self.api_version = api_version self.gcp_conn_id = gcp_conn_id @@ -125,16 +124,15 @@ class GoogleDisplayVideo360DeleteReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, report_id: Optional[str] = None, report_name: Optional[str] = None, api_version: str = "v1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.report_id = report_id self.report_name = report_name self.api_version = api_version @@ -206,7 +204,7 @@ class GoogleDisplayVideo360DownloadReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, report_id: str, bucket_name: str, report_name: Optional[str] = None, @@ -215,10 +213,9 @@ def __init__( api_version: str = "v1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.report_id = 
report_id self.chunk_size = chunk_size self.gzip = gzip @@ -312,16 +309,15 @@ class GoogleDisplayVideo360RunReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, report_id: str, params: Dict[str, Any], api_version: str = "v1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.report_id = report_id self.params = params self.api_version = api_version @@ -364,7 +360,7 @@ class GoogleDisplayVideo360DownloadLineItemsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, request_body: Dict[str, Any], bucket_name: str, object_name: str, @@ -372,10 +368,9 @@ def __init__( api_version: str = "v1.1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.request_body = request_body self.object_name = object_name self.bucket_name = bucket_name @@ -440,16 +435,15 @@ class GoogleDisplayVideo360UploadLineItemsOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, bucket_name: str, object_name: str, api_version: str = "v1.1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.bucket_name = bucket_name self.object_name = object_name self.api_version = api_version @@ -512,15 +506,14 @@ class GoogleDisplayVideo360CreateSDFDownloadTaskOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, body_request: Dict[str, Any], api_version: str = "v1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.body_request = body_request self.api_version = api_version self.gcp_conn_id = gcp_conn_id @@ -576,7 +569,7 @@ class 
GoogleDisplayVideo360SDFtoGCSOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, operation_name: str, bucket_name: str, object_name: str, @@ -584,10 +577,9 @@ def __init__( api_version: str = "v1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.operation_name = operation_name self.bucket_name = bucket_name self.object_name = object_name diff --git a/airflow/providers/google/marketing_platform/operators/search_ads.py b/airflow/providers/google/marketing_platform/operators/search_ads.py index 4f2200f15a8fc..59bc2c7508c78 100644 --- a/airflow/providers/google/marketing_platform/operators/search_ads.py +++ b/airflow/providers/google/marketing_platform/operators/search_ads.py @@ -57,15 +57,14 @@ class GoogleSearchAdsInsertReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, report: Dict[str, Any], api_version: str = "v2", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.report = report self.api_version = api_version self.gcp_conn_id = gcp_conn_id @@ -125,7 +124,7 @@ class GoogleSearchAdsDownloadReportOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, report_id: str, bucket_name: str, report_name: Optional[str] = None, @@ -134,10 +133,9 @@ def __init__( api_version: str = "v2", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.report_id = report_id self.api_version = api_version self.gcp_conn_id = gcp_conn_id diff --git a/airflow/providers/google/marketing_platform/sensors/campaign_manager.py b/airflow/providers/google/marketing_platform/sensors/campaign_manager.py index b721c2f5bd3ee..cfdb039710f34 100644 --- 
a/airflow/providers/google/marketing_platform/sensors/campaign_manager.py +++ b/airflow/providers/google/marketing_platform/sensors/campaign_manager.py @@ -68,7 +68,7 @@ def poke(self, context: Dict) -> bool: @apply_defaults def __init__( - self, + self, *, profile_id: str, report_id: str, file_id: str, @@ -77,10 +77,9 @@ def __init__( delegate_to: Optional[str] = None, mode: str = "reschedule", poke_interval: int = 60 * 5, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.mode = mode self.poke_interval = poke_interval self.profile_id = profile_id diff --git a/airflow/providers/google/marketing_platform/sensors/display_video.py b/airflow/providers/google/marketing_platform/sensors/display_video.py index 7306c6927dedc..0c28617097b02 100644 --- a/airflow/providers/google/marketing_platform/sensors/display_video.py +++ b/airflow/providers/google/marketing_platform/sensors/display_video.py @@ -47,15 +47,14 @@ class GoogleDisplayVideo360ReportSensor(BaseSensorOperator): template_fields = ("report_id",) def __init__( - self, + self, *, report_id: str, api_version: str = "v1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.report_id = report_id self.api_version = api_version diff --git a/airflow/providers/google/marketing_platform/sensors/search_ads.py b/airflow/providers/google/marketing_platform/sensors/search_ads.py index e96196e182ff9..49391e1f0f901 100644 --- a/airflow/providers/google/marketing_platform/sensors/search_ads.py +++ b/airflow/providers/google/marketing_platform/sensors/search_ads.py @@ -52,7 +52,7 @@ class GoogleSearchAdsReportSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, report_id: str, api_version: str = "v2", gcp_conn_id: str = "google_cloud_default", diff --git a/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py 
b/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py index 0d838d4200770..997f2208c4eab 100644 --- a/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py +++ b/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py @@ -26,12 +26,10 @@ GCS_TO_GDRIVE_BUCKET = os.environ.get("GCS_TO_DRIVE_BUCKET", "example-object") -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_gcs_to_gdrive", - default_args=default_args, - schedule_interval=None, # Override to match your needs + schedule_interval=None, # Override to match your needs, + start_date=days_ago(1), tags=['example'], ) as dag: # [START howto_operator_gcs_to_gdrive_copy_single_file] diff --git a/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py b/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py index 52430c295d7ce..1b5eefda33a55 100644 --- a/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py +++ b/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py @@ -27,11 +27,9 @@ SPREADSHEET_ID = os.environ.get("SPREADSHEET_ID", "example-spreadsheetID") NEW_SPREADSHEET_ID = os.environ.get("NEW_SPREADSHEET_ID", "1234567890qwerty") -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_gcs_to_sheets", - default_args=default_args, + start_date=days_ago(1), schedule_interval=None, # Override to match your needs tags=["example"], ) as dag: diff --git a/airflow/providers/google/suite/example_dags/example_sheets.py b/airflow/providers/google/suite/example_dags/example_sheets.py index fcb266be75f20..171cdcfeb9032 100644 --- a/airflow/providers/google/suite/example_dags/example_sheets.py +++ b/airflow/providers/google/suite/example_dags/example_sheets.py @@ -34,12 +34,10 @@ "sheets": [{"properties": {"title": "Sheet1"}}], } -default_args = {"start_date": days_ago(1)} - with models.DAG( "example_sheets_gcs", - default_args=default_args, - schedule_interval=None, # Override to match your 
needs + schedule_interval=None, # Override to match your needs, + start_date=days_ago(1), tags=["example"], ) as dag: # [START upload_sheet_to_gcs] diff --git a/airflow/providers/google/suite/operators/sheets.py b/airflow/providers/google/suite/operators/sheets.py index 1dfe3dac61777..69cf8b0e5f554 100644 --- a/airflow/providers/google/suite/operators/sheets.py +++ b/airflow/providers/google/suite/operators/sheets.py @@ -19,6 +19,7 @@ from airflow.models import BaseOperator from airflow.providers.google.suite.hooks.sheets import GSheetsHook +from airflow.utils.decorators import apply_defaults class GoogleSheetsCreateSpreadsheetOperator(BaseOperator): @@ -40,15 +41,15 @@ class GoogleSheetsCreateSpreadsheetOperator(BaseOperator): template_fields = ["spreadsheet"] + @apply_defaults def __init__( - self, + self, *, spreadsheet: Dict[str, Any], gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.gcp_conn_id = gcp_conn_id self.spreadsheet = spreadsheet self.delegate_to = delegate_to diff --git a/airflow/providers/google/suite/transfers/gcs_to_gdrive.py b/airflow/providers/google/suite/transfers/gcs_to_gdrive.py index b3fa6e4552969..12088757b2718 100644 --- a/airflow/providers/google/suite/transfers/gcs_to_gdrive.py +++ b/airflow/providers/google/suite/transfers/gcs_to_gdrive.py @@ -78,17 +78,16 @@ class GCSToGoogleDriveOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, source_bucket: str, source_object: str, destination_object: Optional[str] = None, move_object: bool = False, gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.source_bucket = source_bucket self.source_object = source_object diff --git a/airflow/providers/google/suite/transfers/gcs_to_sheets.py 
b/airflow/providers/google/suite/transfers/gcs_to_sheets.py index 3dc455ee790ef..43dcba20eab82 100644 --- a/airflow/providers/google/suite/transfers/gcs_to_sheets.py +++ b/airflow/providers/google/suite/transfers/gcs_to_sheets.py @@ -22,6 +22,7 @@ from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.gcs import GCSHook from airflow.providers.google.suite.hooks.sheets import GSheetsHook +from airflow.utils.decorators import apply_defaults class GCSToGoogleSheetsOperator(BaseOperator): @@ -53,18 +54,18 @@ class GCSToGoogleSheetsOperator(BaseOperator): "spreadsheet_range", ] + @apply_defaults def __init__( - self, + self, *, spreadsheet_id: str, bucket_name: str, object_name: Optional[str] = None, spreadsheet_range: str = "Sheet1", gcp_conn_id: str = "google_cloud_default", delegate_to: Optional[str] = None, - *args, **kwargs, ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.gcp_conn_id = gcp_conn_id self.spreadsheet_id = spreadsheet_id diff --git a/airflow/providers/grpc/operators/grpc.py b/airflow/providers/grpc/operators/grpc.py index 107efbc9c3ea9..10ecd49e7635c 100644 --- a/airflow/providers/grpc/operators/grpc.py +++ b/airflow/providers/grpc/operators/grpc.py @@ -53,7 +53,7 @@ class GrpcOperator(BaseOperator): template_fields = ('stub_class', 'call_func', 'data') @apply_defaults - def __init__(self, + def __init__(self, *, stub_class: Callable, call_func: str, grpc_conn_id: str = "grpc_default", @@ -63,8 +63,8 @@ def __init__(self, streaming: bool = False, response_callback: Optional[Callable] = None, log_response: bool = False, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.stub_class = stub_class self.call_func = call_func self.grpc_conn_id = grpc_conn_id diff --git a/airflow/providers/http/example_dags/example_http.py b/airflow/providers/http/example_dags/example_http.py index 5328f83b82c07..435f7d2637839 100644 --- 
a/airflow/providers/http/example_dags/example_http.py +++ b/airflow/providers/http/example_dags/example_http.py @@ -29,7 +29,6 @@ default_args = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': days_ago(2), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False, @@ -37,7 +36,7 @@ 'retry_delay': timedelta(minutes=5), } -dag = DAG('example_http_operator', default_args=default_args, tags=['example']) +dag = DAG('example_http_operator', default_args=default_args, tags=['example'], start_date=days_ago(2)) dag.doc_md = __doc__ diff --git a/airflow/providers/http/operators/http.py b/airflow/providers/http/operators/http.py index 328ce27888a2e..7505ea2bfdc54 100644 --- a/airflow/providers/http/operators/http.py +++ b/airflow/providers/http/operators/http.py @@ -64,7 +64,7 @@ class SimpleHttpOperator(BaseOperator): ui_color = '#f4a460' @apply_defaults - def __init__(self, + def __init__(self, *, endpoint: Optional[str] = None, method: str = 'POST', data: Any = None, @@ -74,8 +74,8 @@ def __init__(self, extra_options: Optional[Dict[str, Any]] = None, http_conn_id: str = 'http_default', log_response: bool = False, - *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) + **kwargs: Any) -> None: + super().__init__(**kwargs) self.http_conn_id = http_conn_id self.method = method self.endpoint = endpoint diff --git a/airflow/providers/http/sensors/http.py b/airflow/providers/http/sensors/http.py index 05a9bffe43402..e730b566ada60 100644 --- a/airflow/providers/http/sensors/http.py +++ b/airflow/providers/http/sensors/http.py @@ -70,7 +70,7 @@ def response_check(response, task_instance): template_fields = ('endpoint', 'request_params') @apply_defaults - def __init__(self, + def __init__(self, *, endpoint: str, http_conn_id: str = 'http_default', method: str = 'GET', @@ -78,9 +78,8 @@ def __init__(self, headers: Optional[Dict[str, Any]] = None, response_check: Optional[Callable[..., Any]] = None, extra_options: 
Optional[Dict[str, Any]] = None, - *args: Any, **kwargs: Any - ) -> None: - super().__init__(*args, **kwargs) + **kwargs: Any) -> None: + super().__init__(**kwargs) self.endpoint = endpoint self.http_conn_id = http_conn_id self.request_params = request_params or {} diff --git a/airflow/providers/imap/hooks/imap.py b/airflow/providers/imap/hooks/imap.py index b7197bb7c1e63..60a46f6f80778 100644 --- a/airflow/providers/imap/hooks/imap.py +++ b/airflow/providers/imap/hooks/imap.py @@ -24,6 +24,7 @@ import imaplib import os import re +from typing import Any, Iterable, List, Optional, Tuple from airflow.exceptions import AirflowException from airflow.hooks.base_hook import BaseHook @@ -41,10 +42,10 @@ class ImapHook(BaseHook): :type imap_conn_id: str """ - def __init__(self, imap_conn_id='imap_default'): + def __init__(self, imap_conn_id: str = 'imap_default') -> None: super().__init__() self.imap_conn_id = imap_conn_id - self.mail_client = None + self.mail_client: Optional[imaplib.IMAP4_SSL] = None def __enter__(self): return self.get_conn() @@ -52,7 +53,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): self.mail_client.logout() - def get_conn(self): + def get_conn(self) -> 'ImapHook': """ Login to the mail server. @@ -70,7 +71,12 @@ def get_conn(self): return self - def has_mail_attachment(self, name, *, check_regex=False, mail_folder='INBOX', mail_filter='All'): + def has_mail_attachment(self, + name: str, + *, + check_regex: bool = False, + mail_folder: str = 'INBOX', + mail_filter: str = 'All') -> bool: """ Checks the mail folder for mails containing attachments with the given name. 
@@ -94,13 +100,13 @@ def has_mail_attachment(self, name, *, check_regex=False, mail_folder='INBOX', m return len(mail_attachments) > 0 def retrieve_mail_attachments(self, - name, + name: str, *, - check_regex=False, - latest_only=False, - mail_folder='INBOX', - mail_filter='All', - not_found_mode='raise'): + check_regex: bool = False, + latest_only: bool = False, + mail_folder: str = 'INBOX', + mail_filter: str = 'All', + not_found_mode: str = 'raise') -> List[Tuple]: """ Retrieves mail's attachments in the mail folder by its name. @@ -136,14 +142,14 @@ def retrieve_mail_attachments(self, return mail_attachments def download_mail_attachments(self, - name, - local_output_directory, + name: str, + local_output_directory: str, *, - check_regex=False, - latest_only=False, - mail_folder='INBOX', - mail_filter='All', - not_found_mode='raise'): + check_regex: bool = False, + latest_only: bool = False, + mail_folder: str = 'INBOX', + mail_filter: str = 'All', + not_found_mode: str = 'raise'): """ Downloads mail's attachments in the mail folder by its name to the local directory. 
@@ -179,7 +185,7 @@ def download_mail_attachments(self, self._create_files(mail_attachments, local_output_directory) - def _handle_not_found_mode(self, not_found_mode): + def _handle_not_found_mode(self, not_found_mode: str): if not_found_mode == 'raise': raise AirflowException('No mail attachments found!') if not_found_mode == 'warn': @@ -189,7 +195,11 @@ def _handle_not_found_mode(self, not_found_mode): else: self.log.error('Invalid "not_found_mode" %s', not_found_mode) - def _retrieve_mails_attachments_by_name(self, name, check_regex, latest_only, mail_folder, mail_filter): + def _retrieve_mails_attachments_by_name(self, name: str, check_regex: bool, latest_only: bool, + mail_folder: str, mail_filter: str) -> List: + if not self.mail_client: + raise Exception("The 'mail_client' should be initialized before!") + all_matching_attachments = [] self.mail_client.select(mail_folder) @@ -207,24 +217,29 @@ def _retrieve_mails_attachments_by_name(self, name, check_regex, latest_only, ma return all_matching_attachments - def _list_mail_ids_desc(self, mail_filter): + def _list_mail_ids_desc(self, mail_filter: str) -> Iterable[str]: + if not self.mail_client: + raise Exception("The 'mail_client' should be initialized before!") _, data = self.mail_client.search(None, mail_filter) mail_ids = data[0].split() return reversed(mail_ids) - def _fetch_mail_body(self, mail_id): + def _fetch_mail_body(self, mail_id: str) -> str: + if not self.mail_client: + raise Exception("The 'mail_client' should be initialized before!") _, data = self.mail_client.fetch(mail_id, '(RFC822)') - mail_body = data[0][1] # The mail body is always in this specific location - mail_body_str = mail_body.decode('utf-8') + mail_body = data[0][1] # type: ignore # The mail body is always in this specific location + mail_body_str = mail_body.decode('utf-8') # type: ignore return mail_body_str - def _check_mail_body(self, response_mail_body, name, check_regex, latest_only): + def _check_mail_body(self, 
response_mail_body: str, name: str, check_regex: bool, + latest_only: bool) -> List[Tuple[Any, Any]]: mail = Mail(response_mail_body) if mail.has_attachments(): return mail.get_attachments_by_name(name, check_regex, find_first=latest_only) return [] - def _create_files(self, mail_attachments, local_output_directory): + def _create_files(self, mail_attachments: List, local_output_directory: str): for name, payload in mail_attachments: if self._is_symlink(name): self.log.error('Can not create file because it is a symlink!') @@ -233,19 +248,19 @@ def _create_files(self, mail_attachments, local_output_directory): else: self._create_file(name, payload, local_output_directory) - def _is_symlink(self, name): + def _is_symlink(self, name: str): # IMPORTANT NOTE: os.path.islink is not working for windows symlinks # See: https://stackoverflow.com/a/11068434 return os.path.islink(name) - def _is_escaping_current_directory(self, name): + def _is_escaping_current_directory(self, name: str): return '../' in name - def _correct_path(self, name, local_output_directory): + def _correct_path(self, name: str, local_output_directory: str): return local_output_directory + name if local_output_directory.endswith('/') \ else local_output_directory + '/' + name - def _create_file(self, name, payload, local_output_directory): + def _create_file(self, name: str, payload: Any, local_output_directory: str): file_path = self._correct_path(name, local_output_directory) with open(file_path, 'wb') as file: @@ -260,11 +275,11 @@ class Mail(LoggingMixin): :type mail_body: str """ - def __init__(self, mail_body): + def __init__(self, mail_body: str) -> None: super().__init__() self.mail = email.message_from_string(mail_body) - def has_attachments(self): + def has_attachments(self) -> bool: """ Checks the mail for a attachments. 
@@ -273,7 +288,10 @@ def has_attachments(self): """ return self.mail.get_content_maintype() == 'multipart' - def get_attachments_by_name(self, name, check_regex, find_first=False): + def get_attachments_by_name(self, + name: str, + check_regex: bool, + find_first: bool = False) -> List[Tuple[Any, Any]]: """ Gets all attachments by name for the mail. @@ -301,7 +319,7 @@ def get_attachments_by_name(self, name, check_regex, find_first=False): return attachments - def _iterate_attachments(self): + def _iterate_attachments(self) -> Iterable['MailPart']: for part in self.mail.walk(): mail_part = MailPart(part) if mail_part.is_attachment(): @@ -316,10 +334,10 @@ class MailPart: :type part: any """ - def __init__(self, part): + def __init__(self, part: Any) -> None: self.part = part - def is_attachment(self): + def is_attachment(self) -> bool: """ Checks if the part is a valid mail attachment. @@ -328,7 +346,7 @@ def is_attachment(self): """ return self.part.get_content_maintype() != 'multipart' and self.part.get('Content-Disposition') - def has_matching_name(self, name): + def has_matching_name(self, name: str) -> Optional[Tuple[Any, Any]]: """ Checks if the given name matches the part's name. @@ -337,9 +355,9 @@ def has_matching_name(self, name): :returns: True if it matches the name (including regular expression). :rtype: tuple """ - return re.match(name, self.part.get_filename()) + return re.match(name, self.part.get_filename()) # type: ignore - def has_equal_name(self, name): + def has_equal_name(self, name: str) -> bool: """ Checks if the given name is equal to the part's name. @@ -350,7 +368,7 @@ def has_equal_name(self, name): """ return self.part.get_filename() == name - def get_file(self): + def get_file(self) -> Tuple: """ Gets the file including name and payload. 
diff --git a/airflow/providers/imap/sensors/imap_attachment.py b/airflow/providers/imap/sensors/imap_attachment.py index c02badde4be73..4468df35e750e 100644 --- a/airflow/providers/imap/sensors/imap_attachment.py +++ b/airflow/providers/imap/sensors/imap_attachment.py @@ -44,15 +44,14 @@ class ImapAttachmentSensor(BaseSensorOperator): template_fields = ('attachment_name', 'mail_filter') @apply_defaults - def __init__(self, + def __init__(self, *, attachment_name, check_regex=False, mail_folder='INBOX', mail_filter='All', conn_id='imap_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.attachment_name = attachment_name self.check_regex = check_regex diff --git a/airflow/providers/jdbc/operators/jdbc.py b/airflow/providers/jdbc/operators/jdbc.py index b973f3fb3cc50..1590d903bd81e 100644 --- a/airflow/providers/jdbc/operators/jdbc.py +++ b/airflow/providers/jdbc/operators/jdbc.py @@ -46,13 +46,13 @@ class JdbcOperator(BaseOperator): ui_color = '#ededed' @apply_defaults - def __init__(self, + def __init__(self, *, sql: str, jdbc_conn_id: str = 'jdbc_default', autocommit: bool = False, parameters: Optional[Union[Mapping, Iterable]] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.parameters = parameters self.sql = sql self.jdbc_conn_id = jdbc_conn_id diff --git a/airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py b/airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py index 07ecf5e55a16e..5a711b25f8f93 100644 --- a/airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py +++ b/airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py @@ -24,10 +24,8 @@ from airflow.providers.jenkins.hooks.jenkins import JenkinsHook from airflow.providers.jenkins.operators.jenkins_job_trigger import JenkinsJobTriggerOperator -datetime_start_date = datetime(2017, 6, 1) default_args = { "owner": "airflow", 
- "start_date": datetime_start_date, "retries": 1, "retry_delay": timedelta(minutes=5), "depends_on_past": False, @@ -40,6 +38,7 @@ with DAG( "test_jenkins", default_args=default_args, + start_date=datetime(2017, 6, 1), schedule_interval=None ) as dag: job_trigger = JenkinsJobTriggerOperator( diff --git a/airflow/providers/jenkins/hooks/jenkins.py b/airflow/providers/jenkins/hooks/jenkins.py index e54739003223b..a3910d159db16 100644 --- a/airflow/providers/jenkins/hooks/jenkins.py +++ b/airflow/providers/jenkins/hooks/jenkins.py @@ -29,7 +29,7 @@ class JenkinsHook(BaseHook): Hook to manage connection to jenkins server """ - def __init__(self, conn_id='jenkins_default'): + def __init__(self, conn_id: str = 'jenkins_default') -> None: super().__init__() connection = self.get_connection(conn_id) self.connection = connection @@ -45,7 +45,7 @@ def __init__(self, conn_id='jenkins_default'): self.log.info('Trying to connect to %s', url) self.jenkins_server = jenkins.Jenkins(url, connection.login, connection.password) - def get_jenkins_server(self): + def get_jenkins_server(self) -> jenkins.Jenkins: """ Get jenkins server """ diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/airflow/providers/jenkins/operators/jenkins_job_trigger.py index 434279c6d7127..471abd43d37e2 100644 --- a/airflow/providers/jenkins/operators/jenkins_job_trigger.py +++ b/airflow/providers/jenkins/operators/jenkins_job_trigger.py @@ -16,13 +16,15 @@ # specific language governing permissions and limitations # under the License. 
+import ast import json import socket import time +from typing import Any, Dict, List, Mapping, Optional, Union from urllib.error import HTTPError, URLError import jenkins -from jenkins import JenkinsException +from jenkins import Jenkins, JenkinsException from requests import Request from airflow.exceptions import AirflowException @@ -30,8 +32,11 @@ from airflow.providers.jenkins.hooks.jenkins import JenkinsHook from airflow.utils.decorators import apply_defaults +JenkinsRequest = Mapping[str, Any] +ParamType = Optional[Union[str, Dict, List]] -def jenkins_request_with_headers(jenkins_server, req): + +def jenkins_request_with_headers(jenkins_server: Jenkins, req: Request) -> Optional[JenkinsRequest]: """ We need to get the headers in addition to the body answer to get the location from them @@ -56,7 +61,7 @@ def jenkins_request_with_headers(jenkins_server, req): # Jenkins's funky authentication means its nigh impossible to distinguish errors. if e.code in [401, 403, 500]: raise JenkinsException( - 'Error in request. Possibly authentication failed [%s]: %s' % (e.code, e.msg) + 'Error in request. Possibly authentication failed [%s]: %s' % (e.code, e.reason) ) elif e.code == 404: raise jenkins.NotFoundException('Requested item could not be found') @@ -66,6 +71,7 @@ def jenkins_request_with_headers(jenkins_server, req): raise jenkins.TimeoutException('Error in request: %s' % e) except URLError as e: raise JenkinsException('Error in request: %s' % e.reason) + return None class JenkinsJobTriggerOperator(BaseOperator): @@ -79,8 +85,9 @@ class JenkinsJobTriggerOperator(BaseOperator): :type jenkins_connection_id: str :param job_name: The name of the job to trigger :type job_name: str - :param parameters: The parameters block to provide to jenkins. (templated) - :type parameters: str + :param parameters: The parameters block provided to jenkins for use in + the API call when triggering a build. 
(templated) + :type parameters: str, Dict, or List :param sleep_time: How long will the operator sleep between each status request for the job (min 1, default 10) :type sleep_time: int @@ -93,15 +100,14 @@ class JenkinsJobTriggerOperator(BaseOperator): ui_color = '#f9ec86' @apply_defaults - def __init__(self, - jenkins_connection_id, - job_name, - parameters="", - sleep_time=10, - max_try_before_job_appears=10, - *args, + def __init__(self, *, + jenkins_connection_id: str, + job_name: str, + parameters: ParamType = "", + sleep_time: int = 10, + max_try_before_job_appears: int = 10, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.job_name = job_name self.parameters = parameters if sleep_time < 1: @@ -110,7 +116,9 @@ def __init__(self, self.jenkins_connection_id = jenkins_connection_id self.max_try_before_job_appears = max_try_before_job_appears - def build_job(self, jenkins_server): + def build_job(self, + jenkins_server: Jenkins, + params: ParamType = "") -> Optional[JenkinsRequest]: """ This function makes an API call to Jenkins to trigger a build for 'job_name' It returned a dict with 2 keys : body and headers. @@ -118,25 +126,25 @@ def build_job(self, jenkins_server): the location to poll in the queue. :param jenkins_server: The jenkins server where the job should be triggered + :param params: The parameters block to provide to jenkins API call. 
:return: Dict containing the response body (key body) and the headers coming along (headers) """ - # Warning if the parameter is too long, the URL can be longer than - # the maximum allowed size - if self.parameters and isinstance(self.parameters, str): - import ast - self.parameters = ast.literal_eval(self.parameters) + # Since params can be either JSON string, dictionary, or list, + # check type and pass to build_job_url + if params and isinstance(params, str): + params = ast.literal_eval(params) - if not self.parameters: - # We need a None to call the non parametrized jenkins api end point - self.parameters = None + # We need a None to call the non-parametrized jenkins api end point + if not params: + params = None request = Request( method='POST', - url=jenkins_server.build_job_url(self.job_name, self.parameters, None)) + url=jenkins_server.build_job_url(self.job_name, params, None)) return jenkins_request_with_headers(jenkins_server, request) - def poll_job_in_queue(self, location, jenkins_server): + def poll_job_in_queue(self, location: str, jenkins_server: Jenkins) -> int: """ This method poll the jenkins queue until the job is executed. When we trigger a job through an API call, @@ -171,13 +179,13 @@ def poll_job_in_queue(self, location, jenkins_server): raise AirflowException("The job hasn't been executed after polling " f"the queue {self.max_try_before_job_appears} times") - def get_hook(self): + def get_hook(self) -> JenkinsHook: """ Instantiate jenkins hook """ return JenkinsHook(self.jenkins_connection_id) - def execute(self, context): + def execute(self, context: Mapping[Any, Any]) -> Optional[str]: if not self.jenkins_connection_id: self.log.error( 'Please specify the jenkins connection id to use.' 
@@ -195,9 +203,10 @@ def execute(self, context): 'Triggering the job %s on the jenkins : %s with the parameters : %s', self.job_name, self.jenkins_connection_id, self.parameters) jenkins_server = self.get_hook().get_jenkins_server() - jenkins_response = self.build_job(jenkins_server) - build_number = self.poll_job_in_queue( - jenkins_response['headers']['Location'], jenkins_server) + jenkins_response = self.build_job(jenkins_server, self.parameters) + if jenkins_response: + build_number = self.poll_job_in_queue( + jenkins_response['headers']['Location'], jenkins_server) time.sleep(self.sleep_time) keep_polling_job = True @@ -235,3 +244,4 @@ def execute(self, context): # If we can we return the url of the job # for later use (like retrieving an artifact) return build_info['url'] + return None diff --git a/airflow/providers/jira/hooks/jira.py b/airflow/providers/jira/hooks/jira.py index b36c65cac6c69..3afc9ae9dc6f9 100644 --- a/airflow/providers/jira/hooks/jira.py +++ b/airflow/providers/jira/hooks/jira.py @@ -16,6 +16,8 @@ # specific language governing permissions and limitations # under the License. 
"""Hook for JIRA""" +from typing import Any, Optional + from jira import JIRA from jira.exceptions import JIRAError @@ -31,15 +33,15 @@ class JiraHook(BaseHook): :type jira_conn_id: str """ def __init__(self, - jira_conn_id='jira_default', - proxies=None): + jira_conn_id: str = 'jira_default', + proxies: Optional[Any] = None) -> None: super().__init__() self.jira_conn_id = jira_conn_id self.proxies = proxies self.client = None self.get_conn() - def get_conn(self): + def get_conn(self) -> JIRA: if not self.client: self.log.debug('Creating Jira client for conn_id: %s', self.jira_conn_id) diff --git a/airflow/providers/jira/operators/jira.py b/airflow/providers/jira/operators/jira.py index 07775af9e45f3..0ba7f7565b9a4 100644 --- a/airflow/providers/jira/operators/jira.py +++ b/airflow/providers/jira/operators/jira.py @@ -16,6 +16,7 @@ # specific language governing permissions and limitations # under the License. +from typing import Any, Callable, Dict, Optional from airflow.exceptions import AirflowException from airflow.models import BaseOperator @@ -44,22 +45,21 @@ class JiraOperator(BaseOperator): template_fields = ("jira_method_args",) @apply_defaults - def __init__(self, - jira_conn_id='jira_default', - jira_method=None, - jira_method_args=None, - result_processor=None, - get_jira_resource_method=None, - *args, - **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, + jira_method: str, + jira_conn_id: str = 'jira_default', + jira_method_args: Optional[dict] = None, + result_processor: Optional[Callable] = None, + get_jira_resource_method: Optional[Callable] = None, + **kwargs) -> None: + super().__init__(**kwargs) self.jira_conn_id = jira_conn_id self.method_name = jira_method self.jira_method_args = jira_method_args self.result_processor = result_processor self.get_jira_resource_method = get_jira_resource_method - def execute(self, context): + def execute(self, context: Dict) -> Any: try: if self.get_jira_resource_method is not None: # if 
get_jira_resource_method is provided, jira_method will be executed on diff --git a/airflow/providers/jira/sensors/jira.py b/airflow/providers/jira/sensors/jira.py index e136e5be3400c..b3d8ca250e831 100644 --- a/airflow/providers/jira/sensors/jira.py +++ b/airflow/providers/jira/sensors/jira.py @@ -15,7 +15,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from jira.resources import Resource +from typing import Any, Callable, Dict, Optional + +from jira.resources import Issue, Resource from airflow.providers.jira.operators.jira import JIRAError, JiraOperator from airflow.sensors.base_sensor_operator import BaseSensorOperator @@ -37,14 +39,13 @@ class JiraSensor(BaseSensorOperator): """ @apply_defaults - def __init__(self, - jira_conn_id='jira_default', - method_name=None, - method_params=None, - result_processor=None, - *args, - **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, + method_name: str, + jira_conn_id: str = 'jira_default', + method_params: Optional[dict] = None, + result_processor: Optional[Callable] = None, + **kwargs) -> None: + super().__init__(**kwargs) self.jira_conn_id = jira_conn_id self.result_processor = None if result_processor is not None: @@ -57,7 +58,7 @@ def __init__(self, jira_method_args=self.method_params, result_processor=self.result_processor) - def poke(self, context): + def poke(self, context: Dict) -> Any: return self.jira_operator.execute(context=context) @@ -80,14 +81,13 @@ class JiraTicketSensor(JiraSensor): template_fields = ("ticket_id",) @apply_defaults - def __init__(self, - jira_conn_id='jira_default', - ticket_id=None, - field=None, - expected_value=None, - field_checker_func=None, - *args, - **kwargs): + def __init__(self, *, + jira_conn_id: str = 'jira_default', + ticket_id: Optional[str] = None, + field: Optional[str] = None, + expected_value: Optional[str] = None, + field_checker_func: Optional[Callable] = 
None, + **kwargs) -> None: self.jira_conn_id = jira_conn_id self.ticket_id = ticket_id @@ -98,10 +98,9 @@ def __init__(self, super().__init__(jira_conn_id=jira_conn_id, result_processor=field_checker_func, - *args, **kwargs) - def poke(self, context): + def poke(self, context: Dict) -> Any: self.log.info('Jira Sensor checking for change in ticket: %s', self.ticket_id) self.jira_operator.method_name = "issue" @@ -111,7 +110,7 @@ def poke(self, context): } return JiraSensor.poke(self, context=context) - def issue_field_checker(self, issue): + def issue_field_checker(self, issue: Issue) -> Optional[bool]: """Check issue using different conditions to prepare to evaluate sensor.""" result = None try: # pylint: disable=too-many-nested-blocks diff --git a/airflow/providers/microsoft/azure/example_dags/example_azure_container_instances.py b/airflow/providers/microsoft/azure/example_dags/example_azure_container_instances.py index a3e009e03bf2b..06121394a49fd 100644 --- a/airflow/providers/microsoft/azure/example_dags/example_azure_container_instances.py +++ b/airflow/providers/microsoft/azure/example_dags/example_azure_container_instances.py @@ -28,7 +28,6 @@ default_args = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': datetime(2018, 11, 1), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False, @@ -40,6 +39,7 @@ dag_id='aci_example', default_args=default_args, schedule_interval=timedelta(1), + start_date=datetime(2018, 11, 1), tags=['example'], ) as dag: diff --git a/airflow/providers/microsoft/azure/example_dags/example_azure_cosmosdb.py b/airflow/providers/microsoft/azure/example_dags/example_azure_cosmosdb.py index 5cf8947839daf..77740cff3cdef 100644 --- a/airflow/providers/microsoft/azure/example_dags/example_azure_cosmosdb.py +++ b/airflow/providers/microsoft/azure/example_dags/example_azure_cosmosdb.py @@ -34,7 +34,6 @@ default_args = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': dates.days_ago(2), 
'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False @@ -43,6 +42,7 @@ with DAG( dag_id='example_azure_cosmosdb_sensor', default_args=default_args, + start_date=dates.days_ago(2), doc_md=__doc__, tags=['example'], ) as dag: diff --git a/airflow/providers/microsoft/azure/hooks/wasb.py b/airflow/providers/microsoft/azure/hooks/wasb.py index 32f9b57d17546..3c91cf330ec48 100644 --- a/airflow/providers/microsoft/azure/hooks/wasb.py +++ b/airflow/providers/microsoft/azure/hooks/wasb.py @@ -88,6 +88,23 @@ def check_for_prefix(self, container_name, prefix, **kwargs): num_results=1, **kwargs) return len(list(matches)) > 0 + def get_blobs_list(self, container_name: str, prefix: str, **kwargs): + """ + Return a list of blobs from path defined in prefix param + + :param container_name: Name of the container. + :type container_name: str + :param prefix: Prefix of the blob. + :type prefix: str + :param kwargs: Optional keyword arguments that + `BlockBlobService.list_blobs()` takes (num_results, include, + delimiter, marker, timeout) + :type kwargs: object + :return: List of blobs. + :rtype: list(azure.storage.common.models.ListGenerator) + """ + return self.connection.list_blobs(container_name, prefix, **kwargs) + def load_file(self, file_path, container_name, blob_name, **kwargs): """ Upload a file to Azure Blob Storage. 
diff --git a/airflow/providers/microsoft/azure/operators/adls_list.py b/airflow/providers/microsoft/azure/operators/adls_list.py index 3c77459db5971..576a2d8b150d8 100644 --- a/airflow/providers/microsoft/azure/operators/adls_list.py +++ b/airflow/providers/microsoft/azure/operators/adls_list.py @@ -50,12 +50,11 @@ class AzureDataLakeStorageListOperator(BaseOperator): ui_color = '#901dd2' @apply_defaults - def __init__(self, + def __init__(self, *, path: str, azure_data_lake_conn_id: str = 'azure_data_lake_default', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.path = path self.azure_data_lake_conn_id = azure_data_lake_conn_id diff --git a/airflow/providers/microsoft/azure/operators/adx.py b/airflow/providers/microsoft/azure/operators/adx.py index ebd1e952d2bc5..eb4c8e9117e58 100644 --- a/airflow/providers/microsoft/azure/operators/adx.py +++ b/airflow/providers/microsoft/azure/operators/adx.py @@ -48,14 +48,13 @@ class AzureDataExplorerQueryOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, query: str, database: str, options: Optional[Dict] = None, azure_data_explorer_conn_id: str = 'azure_data_explorer_default', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.query = query self.database = database self.options = options diff --git a/airflow/providers/microsoft/azure/operators/azure_batch.py b/airflow/providers/microsoft/azure/operators/azure_batch.py index ff3f35af7fd79..6cf080f907e14 100644 --- a/airflow/providers/microsoft/azure/operators/azure_batch.py +++ b/airflow/providers/microsoft/azure/operators/azure_batch.py @@ -148,7 +148,7 @@ class AzureBatchOperator(BaseOperator): ui_color = '#f0f0e4' @apply_defaults - def __init__(self, # pylint: disable=too-many-arguments,too-many-locals + def __init__(self, *, # pylint: disable=too-many-arguments,too-many-locals batch_pool_id: str, batch_pool_vm_size: str, batch_job_id: str, @@ -178,10 
+178,9 @@ def __init__(self, # pylint: disable=too-many-arguments,too-many-locals timeout: int = 25, should_delete_job: bool = False, should_delete_pool: bool = False, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.batch_pool_id = batch_pool_id self.batch_pool_vm_size = batch_pool_vm_size self.batch_job_id = batch_job_id diff --git a/airflow/providers/microsoft/azure/operators/azure_container_instances.py b/airflow/providers/microsoft/azure/operators/azure_container_instances.py index 06f2a34ed2bf9..9f4b407eb693e 100644 --- a/airflow/providers/microsoft/azure/operators/azure_container_instances.py +++ b/airflow/providers/microsoft/azure/operators/azure_container_instances.py @@ -122,7 +122,7 @@ class AzureContainerInstancesOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, ci_conn_id: str, registry_conn_id: Optional[str], resource_group: str, @@ -139,9 +139,8 @@ def __init__(self, remove_on_error: bool = True, fail_if_exists: bool = True, tags: Optional[Dict[str, str]] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.ci_conn_id = ci_conn_id self.resource_group = resource_group diff --git a/airflow/providers/microsoft/azure/operators/azure_cosmos.py b/airflow/providers/microsoft/azure/operators/azure_cosmos.py index 10bfc2423133b..a206f25cc0868 100644 --- a/airflow/providers/microsoft/azure/operators/azure_cosmos.py +++ b/airflow/providers/microsoft/azure/operators/azure_cosmos.py @@ -40,14 +40,13 @@ class AzureCosmosInsertDocumentOperator(BaseOperator): ui_color = '#e4f0e8' @apply_defaults - def __init__(self, + def __init__(self, *, database_name: str, collection_name: str, document: dict, azure_cosmos_conn_id: str = 'azure_cosmos_default', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.database_name = database_name self.collection_name = 
collection_name self.document = document diff --git a/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py b/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py index b669e63ca113b..7779a60222c1f 100644 --- a/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py +++ b/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py @@ -45,16 +45,15 @@ class WasbDeleteBlobOperator(BaseOperator): template_fields = ('container_name', 'blob_name') @apply_defaults - def __init__(self, + def __init__(self, *, container_name: str, blob_name: str, wasb_conn_id: str = 'wasb_default', check_options: Any = None, is_prefix: bool = False, ignore_if_missing: bool = False, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if check_options is None: check_options = {} self.wasb_conn_id = wasb_conn_id diff --git a/airflow/providers/microsoft/azure/sensors/azure_cosmos.py b/airflow/providers/microsoft/azure/sensors/azure_cosmos.py index d7f2616366af1..7235b94e86918 100644 --- a/airflow/providers/microsoft/azure/sensors/azure_cosmos.py +++ b/airflow/providers/microsoft/azure/sensors/azure_cosmos.py @@ -46,14 +46,13 @@ class AzureCosmosDocumentSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, database_name: str, collection_name: str, document_id: str, azure_cosmos_conn_id: str = "azure_cosmos_default", - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.azure_cosmos_conn_id = azure_cosmos_conn_id self.database_name = database_name self.collection_name = collection_name diff --git a/airflow/providers/microsoft/azure/sensors/wasb.py b/airflow/providers/microsoft/azure/sensors/wasb.py index f689ed005e52e..033b89fe154cd 100644 --- a/airflow/providers/microsoft/azure/sensors/wasb.py +++ b/airflow/providers/microsoft/azure/sensors/wasb.py @@ -41,14 +41,13 @@ class WasbBlobSensor(BaseSensorOperator): template_fields = ('container_name', 
'blob_name') @apply_defaults - def __init__(self, + def __init__(self, *, container_name: str, blob_name: str, wasb_conn_id: str = 'wasb_default', check_options: Optional[dict] = None, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if check_options is None: check_options = {} self.wasb_conn_id = wasb_conn_id @@ -83,14 +82,13 @@ class WasbPrefixSensor(BaseSensorOperator): template_fields = ('container_name', 'prefix') @apply_defaults - def __init__(self, + def __init__(self, *, container_name: str, prefix: str, wasb_conn_id: str = 'wasb_default', check_options: Optional[dict] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if check_options is None: check_options = {} self.wasb_conn_id = wasb_conn_id diff --git a/airflow/providers/microsoft/azure/transfers/file_to_wasb.py b/airflow/providers/microsoft/azure/transfers/file_to_wasb.py index 64cdbc0c504c5..ac1b415f2ceb1 100644 --- a/airflow/providers/microsoft/azure/transfers/file_to_wasb.py +++ b/airflow/providers/microsoft/azure/transfers/file_to_wasb.py @@ -42,15 +42,14 @@ class FileToWasbOperator(BaseOperator): template_fields = ('file_path', 'container_name', 'blob_name') @apply_defaults - def __init__(self, + def __init__(self, *, file_path: str, container_name: str, blob_name: str, wasb_conn_id: str = 'wasb_default', load_options: Optional[dict] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if load_options is None: load_options = {} self.file_path = file_path diff --git a/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py index 65c128c3b692f..a83c1ae727da5 100644 --- a/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py +++ b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py @@ -62,7 +62,7 @@ class 
OracleToAzureDataLakeOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults def __init__( - self, + self, *, filename: str, azure_data_lake_conn_id: str, azure_data_lake_path: str, @@ -73,9 +73,8 @@ def __init__( encoding: str = "utf-8", quotechar: str = '"', quoting: str = csv.QUOTE_MINIMAL, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if sql_params is None: sql_params = {} self.filename = filename diff --git a/airflow/providers/microsoft/mssql/operators/mssql.py b/airflow/providers/microsoft/mssql/operators/mssql.py index 88f6146670d3e..6b8f4d9396590 100644 --- a/airflow/providers/microsoft/mssql/operators/mssql.py +++ b/airflow/providers/microsoft/mssql/operators/mssql.py @@ -53,15 +53,15 @@ class MsSqlOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, sql: str, mssql_conn_id: str = 'mssql_default', parameters: Optional[Union[Mapping, Iterable]] = None, autocommit: bool = False, database: Optional[str] = None, - *args, **kwargs + **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.mssql_conn_id = mssql_conn_id self.sql = sql self.parameters = parameters diff --git a/airflow/providers/microsoft/winrm/example_dags/example_winrm.py b/airflow/providers/microsoft/winrm/example_dags/example_winrm.py index ecf32e0f317ad..85b4038fb77f9 100644 --- a/airflow/providers/microsoft/winrm/example_dags/example_winrm.py +++ b/airflow/providers/microsoft/winrm/example_dags/example_winrm.py @@ -38,13 +38,13 @@ default_args = { 'owner': 'airflow', - 'start_date': days_ago(2) } with DAG( dag_id='POC_winrm_parallel', default_args=default_args, schedule_interval='0 0 * * *', + start_date=days_ago(2), dagrun_timeout=timedelta(minutes=60), tags=['example'], ) as dag: diff --git a/airflow/providers/microsoft/winrm/operators/winrm.py b/airflow/providers/microsoft/winrm/operators/winrm.py index 53e80ebc4a5be..7afbcfbe72016 100644 --- 
a/airflow/providers/microsoft/winrm/operators/winrm.py +++ b/airflow/providers/microsoft/winrm/operators/winrm.py @@ -51,15 +51,14 @@ class WinRMOperator(BaseOperator): template_fields = ('command',) @apply_defaults - def __init__(self, + def __init__(self, *, winrm_hook=None, ssh_conn_id=None, remote_host=None, command=None, timeout=10, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.winrm_hook = winrm_hook self.ssh_conn_id = ssh_conn_id self.remote_host = remote_host diff --git a/airflow/providers/mongo/sensors/mongo.py b/airflow/providers/mongo/sensors/mongo.py index b078699450c97..58a0e893b6a68 100644 --- a/airflow/providers/mongo/sensors/mongo.py +++ b/airflow/providers/mongo/sensors/mongo.py @@ -41,13 +41,12 @@ class MongoSensor(BaseSensorOperator): template_fields = ('collection', 'query') @apply_defaults - def __init__(self, + def __init__(self, *, collection: str, query: dict, mongo_conn_id: str = "mongo_default", - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.mongo_conn_id = mongo_conn_id self.collection = collection self.query = query diff --git a/airflow/providers/mysql/operators/mysql.py b/airflow/providers/mysql/operators/mysql.py index 8dbf24fd38e4c..256f577d08bad 100644 --- a/airflow/providers/mysql/operators/mysql.py +++ b/airflow/providers/mysql/operators/mysql.py @@ -48,14 +48,14 @@ class MySqlOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, sql: str, mysql_conn_id: str = 'mysql_default', parameters: Optional[Union[Mapping, Iterable]] = None, autocommit: bool = False, database: Optional[str] = None, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.mysql_conn_id = mysql_conn_id self.sql = sql self.autocommit = autocommit diff --git a/airflow/providers/mysql/transfers/presto_to_mysql.py b/airflow/providers/mysql/transfers/presto_to_mysql.py index 156ff319d2138..bf9f9422acbac 
100644 --- a/airflow/providers/mysql/transfers/presto_to_mysql.py +++ b/airflow/providers/mysql/transfers/presto_to_mysql.py @@ -50,14 +50,14 @@ class PrestoToMySqlOperator(BaseOperator): ui_color = '#a0e08c' @apply_defaults - def __init__(self, + def __init__(self, *, sql: str, mysql_table: str, presto_conn_id: str = 'presto_default', mysql_conn_id: str = 'mysql_default', mysql_preoperator: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.sql = sql self.mysql_table = mysql_table self.mysql_conn_id = mysql_conn_id diff --git a/airflow/providers/mysql/transfers/s3_to_mysql.py b/airflow/providers/mysql/transfers/s3_to_mysql.py index 53cb144690abc..2bce20fd74d9c 100644 --- a/airflow/providers/mysql/transfers/s3_to_mysql.py +++ b/airflow/providers/mysql/transfers/s3_to_mysql.py @@ -51,15 +51,15 @@ class S3ToMySqlOperator(BaseOperator): ui_color = '#f4a460' @apply_defaults - def __init__(self, + def __init__(self, *, s3_source_key: str, mysql_table: str, mysql_duplicate_key_handling: str = 'IGNORE', mysql_extra_options: Optional[str] = None, aws_conn_id: str = 'aws_default', mysql_conn_id: str = 'mysql_default', - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.s3_source_key = s3_source_key self.mysql_table = mysql_table self.mysql_duplicate_key_handling = mysql_duplicate_key_handling diff --git a/airflow/providers/mysql/transfers/vertica_to_mysql.py b/airflow/providers/mysql/transfers/vertica_to_mysql.py index 0e6b8a69dbeb2..146b82cbdb944 100644 --- a/airflow/providers/mysql/transfers/vertica_to_mysql.py +++ b/airflow/providers/mysql/transfers/vertica_to_mysql.py @@ -65,6 +65,7 @@ class VerticaToMySqlOperator(BaseOperator): @apply_defaults def __init__( self, + *, sql, mysql_table, vertica_conn_id='vertica_default', @@ -72,8 +73,8 @@ def __init__( mysql_preoperator=None, mysql_postoperator=None, 
bulk_load=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.sql = sql self.mysql_table = mysql_table self.mysql_conn_id = mysql_conn_id diff --git a/airflow/providers/opsgenie/operators/opsgenie_alert.py b/airflow/providers/opsgenie/operators/opsgenie_alert.py index ce09574157563..f086816da91e7 100644 --- a/airflow/providers/opsgenie/operators/opsgenie_alert.py +++ b/airflow/providers/opsgenie/operators/opsgenie_alert.py @@ -68,7 +68,7 @@ class OpsgenieAlertOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, message, opsgenie_conn_id='opsgenie_default', alias=None, @@ -83,10 +83,9 @@ def __init__(self, priority=None, user=None, note=None, - *args, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.message = message self.opsgenie_conn_id = opsgenie_conn_id diff --git a/airflow/providers/oracle/operators/oracle.py b/airflow/providers/oracle/operators/oracle.py index 19bc143ad2233..5d7c98eb747fc 100644 --- a/airflow/providers/oracle/operators/oracle.py +++ b/airflow/providers/oracle/operators/oracle.py @@ -46,13 +46,13 @@ class OracleOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, sql: str, oracle_conn_id: str = 'oracle_default', parameters: Optional[Union[Mapping, Iterable]] = None, autocommit: bool = False, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.oracle_conn_id = oracle_conn_id self.sql = sql self.autocommit = autocommit diff --git a/airflow/providers/oracle/transfers/oracle_to_oracle.py b/airflow/providers/oracle/transfers/oracle_to_oracle.py index 6f845d3e8f045..64dbe7ad4e52d 100644 --- a/airflow/providers/oracle/transfers/oracle_to_oracle.py +++ b/airflow/providers/oracle/transfers/oracle_to_oracle.py @@ -46,15 +46,15 @@ class OracleToOracleOperator(BaseOperator): @apply_defaults def __init__( - self, + 
self, *, oracle_destination_conn_id, destination_table, oracle_source_conn_id, source_sql, source_sql_params=None, rows_chunk=5000, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) if source_sql_params is None: source_sql_params = {} self.oracle_destination_conn_id = oracle_destination_conn_id diff --git a/airflow/providers/papermill/example_dags/example_papermill.py b/airflow/providers/papermill/example_dags/example_papermill.py index a462fc8d3898f..6b9910ab5f441 100644 --- a/airflow/providers/papermill/example_dags/example_papermill.py +++ b/airflow/providers/papermill/example_dags/example_papermill.py @@ -33,13 +33,13 @@ default_args = { 'owner': 'airflow', - 'start_date': days_ago(2) } with DAG( dag_id='example_papermill_operator', default_args=default_args, schedule_interval='0 0 * * *', + start_date=days_ago(2), dagrun_timeout=timedelta(minutes=60), tags=['example'], ) as dag_1: @@ -71,6 +71,7 @@ def check_notebook(inlets, execution_date): dag_id='example_papermill_operator', default_args=default_args, schedule_interval='0 0 * * *', + start_date=days_ago(2), dagrun_timeout=timedelta(minutes=60) ) as dag_2: diff --git a/airflow/providers/papermill/operators/papermill.py b/airflow/providers/papermill/operators/papermill.py index b34e5c88f4819..4c800506368f3 100644 --- a/airflow/providers/papermill/operators/papermill.py +++ b/airflow/providers/papermill/operators/papermill.py @@ -50,12 +50,12 @@ class PapermillOperator(BaseOperator): supports_lineage = True @apply_defaults - def __init__(self, + def __init__(self, *, input_nb: Optional[str] = None, output_nb: Optional[str] = None, parameters: Optional[Dict] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) if input_nb: self.inlets.append(NoteBook(url=input_nb, diff --git a/airflow/providers/postgres/operators/postgres.py b/airflow/providers/postgres/operators/postgres.py index 
5304f74a36848..f0bce2186aee2 100644 --- a/airflow/providers/postgres/operators/postgres.py +++ b/airflow/providers/postgres/operators/postgres.py @@ -47,14 +47,14 @@ class PostgresOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, sql: str, postgres_conn_id: str = 'postgres_default', autocommit: bool = False, parameters: Optional[Union[Mapping, Iterable]] = None, database: Optional[str] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.sql = sql self.postgres_conn_id = postgres_conn_id self.autocommit = autocommit diff --git a/airflow/providers/qubole/example_dags/example_qubole.py b/airflow/providers/qubole/example_dags/example_qubole.py index 7b46e24c44fde..7076e34d530a9 100644 --- a/airflow/providers/qubole/example_dags/example_qubole.py +++ b/airflow/providers/qubole/example_dags/example_qubole.py @@ -30,7 +30,6 @@ default_args = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': days_ago(2), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False @@ -40,6 +39,7 @@ dag_id='example_qubole_operator', default_args=default_args, schedule_interval=None, + start_date=days_ago(2), tags=['example'], ) as dag: dag.doc_md = textwrap.dedent( @@ -236,6 +236,7 @@ def main(args: Array[String]) { dag_id='example_qubole_sensor', default_args=default_args, schedule_interval=None, + start_date=days_ago(2), doc_md=__doc__, tags=['example'], ) as dag2: diff --git a/airflow/providers/qubole/operators/qubole.py b/airflow/providers/qubole/operators/qubole.py index bae58c68e66af..278e21807cf1e 100644 --- a/airflow/providers/qubole/operators/qubole.py +++ b/airflow/providers/qubole/operators/qubole.py @@ -184,13 +184,12 @@ class QuboleOperator(BaseOperator): ) @apply_defaults - def __init__(self, qubole_conn_id="qubole_default", *args, **kwargs): - self.args = args + def __init__(self, *, qubole_conn_id="qubole_default", **kwargs): self.kwargs = 
kwargs self.kwargs['qubole_conn_id'] = qubole_conn_id self.hook = None filtered_base_kwargs = self._get_filtered_args(kwargs) - super().__init__(*args, **filtered_base_kwargs) + super().__init__(**filtered_base_kwargs) if self.on_failure_callback is None: self.on_failure_callback = QuboleHook.handle_failure_retry @@ -226,7 +225,7 @@ def get_jobs_id(self, ti): def get_hook(self): """Reinitialising the hook, as some template fields might have changed""" - return QuboleHook(*self.args, **self.kwargs) + return QuboleHook(**self.kwargs) def __getattribute__(self, name): if name in QuboleOperator.template_fields: diff --git a/airflow/providers/qubole/operators/qubole_check.py b/airflow/providers/qubole/operators/qubole_check.py index 0a59f23b5dae8..e010281005ef9 100644 --- a/airflow/providers/qubole/operators/qubole_check.py +++ b/airflow/providers/qubole/operators/qubole_check.py @@ -81,9 +81,9 @@ class QuboleCheckOperator(CheckOperator, QuboleOperator): ui_fgcolor = '#000' @apply_defaults - def __init__(self, qubole_conn_id="qubole_default", *args, **kwargs): + def __init__(self, *, qubole_conn_id="qubole_default", **kwargs): sql = get_sql_from_qbol_cmd(kwargs) - super().__init__(qubole_conn_id=qubole_conn_id, sql=sql, *args, **kwargs) + super().__init__(qubole_conn_id=qubole_conn_id, sql=sql, **kwargs) self.on_failure_callback = QuboleCheckHook.handle_failure_retry self.on_retry_callback = QuboleCheckHook.handle_failure_retry @@ -101,7 +101,7 @@ def get_hook(self, context=None): if hasattr(self, 'hook') and (self.hook is not None): return self.hook else: - return QuboleCheckHook(context=context, *self.args, **self.kwargs) + return QuboleCheckHook(context=context, **self.kwargs) def __getattribute__(self, name): if name in QuboleCheckOperator.template_fields: @@ -162,14 +162,14 @@ class QuboleValueCheckOperator(ValueCheckOperator, QuboleOperator): ui_fgcolor = '#000' @apply_defaults - def __init__(self, pass_value, tolerance=None, results_parser_callable=None, - 
qubole_conn_id="qubole_default", *args, **kwargs): + def __init__(self, *, pass_value, tolerance=None, results_parser_callable=None, + qubole_conn_id="qubole_default", **kwargs): sql = get_sql_from_qbol_cmd(kwargs) super().__init__( qubole_conn_id=qubole_conn_id, sql=sql, pass_value=pass_value, tolerance=tolerance, - *args, **kwargs) + **kwargs) self.results_parser_callable = results_parser_callable self.on_failure_callback = QuboleCheckHook.handle_failure_retry @@ -191,7 +191,6 @@ def get_hook(self, context=None): else: return QuboleCheckHook( context=context, - *self.args, results_parser_callable=self.results_parser_callable, **self.kwargs ) diff --git a/airflow/providers/qubole/sensors/qubole.py b/airflow/providers/qubole/sensors/qubole.py index fae8c281ca51c..b432ede1edbfd 100644 --- a/airflow/providers/qubole/sensors/qubole.py +++ b/airflow/providers/qubole/sensors/qubole.py @@ -35,7 +35,7 @@ class QuboleSensor(BaseSensorOperator): template_ext = ('.txt',) @apply_defaults - def __init__(self, data, qubole_conn_id="qubole_default", *args, **kwargs): + def __init__(self, *, data, qubole_conn_id="qubole_default", **kwargs): self.data = data self.qubole_conn_id = qubole_conn_id @@ -44,7 +44,7 @@ def __init__(self, data, qubole_conn_id="qubole_default", *args, **kwargs): "task '{0}' in dag '{1}'." 
.format(kwargs['task_id'], kwargs['dag'].dag_id)) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def poke(self, context): @@ -83,9 +83,9 @@ class QuboleFileSensor(QuboleSensor): """ @apply_defaults - def __init__(self, *args, **kwargs): + def __init__(self, **kwargs): self.sensor_class = FileSensor - super().__init__(*args, **kwargs) + super().__init__(**kwargs) class QubolePartitionSensor(QuboleSensor): @@ -106,6 +106,6 @@ class QubolePartitionSensor(QuboleSensor): """ @apply_defaults - def __init__(self, *args, **kwargs): + def __init__(self, **kwargs): self.sensor_class = PartitionSensor - super().__init__(*args, **kwargs) + super().__init__(**kwargs) diff --git a/airflow/providers/redis/operators/redis_publish.py b/airflow/providers/redis/operators/redis_publish.py index 8c357af9cd20c..6cec1443158f8 100644 --- a/airflow/providers/redis/operators/redis_publish.py +++ b/airflow/providers/redis/operators/redis_publish.py @@ -39,13 +39,13 @@ class RedisPublishOperator(BaseOperator): @apply_defaults def __init__( - self, + self, *, channel: str, message: str, redis_conn_id: str = 'redis_default', - *args, **kwargs) -> None: + **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.redis_conn_id = redis_conn_id self.channel = channel self.message = message diff --git a/airflow/providers/redis/sensors/redis_key.py b/airflow/providers/redis/sensors/redis_key.py index 59167e90cdcbc..c90c965addbe1 100644 --- a/airflow/providers/redis/sensors/redis_key.py +++ b/airflow/providers/redis/sensors/redis_key.py @@ -30,8 +30,8 @@ class RedisKeySensor(BaseSensorOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, key: str, redis_conn_id: str, *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + def __init__(self, *, key: str, redis_conn_id: str, **kwargs) -> None: + super().__init__(**kwargs) self.redis_conn_id = redis_conn_id self.key = key diff --git a/airflow/providers/redis/sensors/redis_pub_sub.py 
b/airflow/providers/redis/sensors/redis_pub_sub.py index 9be2b5657196d..b2f5e8b148738 100644 --- a/airflow/providers/redis/sensors/redis_pub_sub.py +++ b/airflow/providers/redis/sensors/redis_pub_sub.py @@ -36,8 +36,8 @@ class RedisPubSubSensor(BaseSensorOperator): ui_color = '#f0eee4' @apply_defaults - def __init__(self, channels: Union[List[str], str], redis_conn_id: str, *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + def __init__(self, *, channels: Union[List[str], str], redis_conn_id: str, **kwargs) -> None: + super().__init__(**kwargs) self.channels = channels self.redis_conn_id = redis_conn_id self.pubsub = RedisHook(redis_conn_id=self.redis_conn_id).get_conn().pubsub() diff --git a/airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py b/airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py index 223165b3c3ecc..68b30ca0c63fb 100644 --- a/airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py +++ b/airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py @@ -30,7 +30,6 @@ DEFAULT_ARGS = { 'owner': 'airflow', 'depends_on_past': False, - 'start_date': days_ago(2), 'email': ['airflow@example.com'], 'email_on_failure': False, 'email_on_retry': False @@ -41,6 +40,7 @@ default_args=DEFAULT_ARGS, dagrun_timeout=timedelta(hours=2), schedule_interval=None, + start_date=days_ago(2), tags=['example'], ) as dag: # Refreshes a workbook and waits until it succeeds. 
diff --git a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py index 798a4245a31d9..8000680ee5f91 100644 --- a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py +++ b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py @@ -42,14 +42,13 @@ class TableauRefreshWorkbookOperator(BaseOperator): """ @apply_defaults - def __init__(self, + def __init__(self, *, workbook_name: str, site_id: Optional[str] = None, blocking: bool = True, tableau_conn_id: str = 'tableau_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.workbook_name = workbook_name self.site_id = site_id self.blocking = blocking diff --git a/airflow/providers/salesforce/sensors/tableau_job_status.py b/airflow/providers/salesforce/sensors/tableau_job_status.py index b3873d5d65bf8..7ec6b3c293bf1 100644 --- a/airflow/providers/salesforce/sensors/tableau_job_status.py +++ b/airflow/providers/salesforce/sensors/tableau_job_status.py @@ -46,13 +46,12 @@ class TableauJobStatusSensor(BaseSensorOperator): template_fields = ('job_id',) @apply_defaults - def __init__(self, + def __init__(self, *, job_id: str, site_id: Optional[str] = None, tableau_conn_id: str = 'tableau_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.tableau_conn_id = tableau_conn_id self.job_id = job_id self.site_id = site_id diff --git a/airflow/providers/segment/hooks/segment.py b/airflow/providers/segment/hooks/segment.py index 940621d5a5fec..2ff223ec7fb46 100644 --- a/airflow/providers/segment/hooks/segment.py +++ b/airflow/providers/segment/hooks/segment.py @@ -54,11 +54,11 @@ class SegmentHook(BaseHook): """ def __init__( self, - segment_conn_id='segment_default', - segment_debug_mode=False, + segment_conn_id: str = 'segment_default', + segment_debug_mode: bool = False, *args, **kwargs - ): + ) -> None: super().__init__() 
self.segment_conn_id = segment_conn_id self.segment_debug_mode = segment_debug_mode @@ -72,7 +72,7 @@ def __init__( if self.write_key is None: raise AirflowException('No Segment write key provided') - def get_conn(self): + def get_conn(self) -> analytics: self.log.info('Setting write key for Segment analytics connection') analytics.debug = self.segment_debug_mode if self.segment_debug_mode: @@ -81,7 +81,7 @@ def get_conn(self): analytics.write_key = self.write_key return analytics - def on_error(self, error, items): + def on_error(self, error: str, items: str) -> None: """ Handles error callbacks when using Segment with segment_debug_mode set to True """ diff --git a/airflow/providers/segment/operators/segment_track_event.py b/airflow/providers/segment/operators/segment_track_event.py index 4d3db18748245..01681a4034799 100644 --- a/airflow/providers/segment/operators/segment_track_event.py +++ b/airflow/providers/segment/operators/segment_track_event.py @@ -15,6 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+from typing import Dict, Optional from airflow.models import BaseOperator from airflow.providers.segment.hooks.segment import SegmentHook @@ -41,15 +42,14 @@ class SegmentTrackEventOperator(BaseOperator): ui_color = '#ffd700' @apply_defaults - def __init__(self, - user_id, - event, - properties=None, - segment_conn_id='segment_default', - segment_debug_mode=False, - *args, - **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, + user_id: str, + event: str, + properties: Optional[dict] = None, + segment_conn_id: str = 'segment_default', + segment_debug_mode: bool = False, + **kwargs) -> None: + super().__init__(**kwargs) self.user_id = user_id self.event = event properties = properties or {} @@ -57,7 +57,7 @@ def __init__(self, self.segment_debug_mode = segment_debug_mode self.segment_conn_id = segment_conn_id - def execute(self, context): + def execute(self, context: Dict) -> None: hook = SegmentHook(segment_conn_id=self.segment_conn_id, segment_debug_mode=self.segment_debug_mode) @@ -66,7 +66,7 @@ def execute(self, context): self.event, self.user_id, self.properties) # pylint: disable=no-member - hook.track( + hook.track( # type: ignore user_id=self.user_id, event=self.event, properties=self.properties) diff --git a/airflow/providers/sendgrid/utils/emailer.py b/airflow/providers/sendgrid/utils/emailer.py index 43e61cf394661..f37fc82859369 100644 --- a/airflow/providers/sendgrid/utils/emailer.py +++ b/airflow/providers/sendgrid/utils/emailer.py @@ -23,6 +23,7 @@ import logging import mimetypes import os +from typing import Dict, Iterable, Optional, Union import sendgrid from sendgrid.helpers.mail import ( @@ -33,9 +34,17 @@ log = logging.getLogger(__name__) +AddressesType = Union[str, Iterable[str]] -def send_email(to, subject, html_content, files=None, cc=None, - bcc=None, sandbox_mode=False, **kwargs): + +def send_email(to: AddressesType, + subject: str, + html_content: str, + files: Optional[AddressesType] = None, + cc: Optional[AddressesType] 
= None, + bcc: Optional[AddressesType] = None, + sandbox_mode: bool = False, + **kwargs) -> None: """ Send an email with html content using `Sendgrid `__. @@ -102,7 +111,7 @@ def send_email(to, subject, html_content, files=None, cc=None, _post_sendgrid_mail(mail.get()) -def _post_sendgrid_mail(mail_data): +def _post_sendgrid_mail(mail_data: Dict) -> None: sendgrid_client = sendgrid.SendGridAPIClient(api_key=os.environ.get('SENDGRID_API_KEY')) response = sendgrid_client.client.mail.send.post(request_body=mail_data) # 2xx status code. diff --git a/airflow/providers/sftp/operators/sftp.py b/airflow/providers/sftp/operators/sftp.py index 6296ce6b1f596..fc2060bd9a238 100644 --- a/airflow/providers/sftp/operators/sftp.py +++ b/airflow/providers/sftp/operators/sftp.py @@ -19,6 +19,7 @@ This module contains SFTP operator. """ import os +from pathlib import Path from airflow.exceptions import AirflowException from airflow.models import BaseOperator @@ -81,7 +82,7 @@ class SFTPOperator(BaseOperator): template_fields = ('local_filepath', 'remote_filepath', 'remote_host') @apply_defaults - def __init__(self, + def __init__(self, *, ssh_hook=None, ssh_conn_id=None, remote_host=None, @@ -90,9 +91,8 @@ def __init__(self, operation=SFTPOperation.PUT, confirm=True, create_intermediate_dirs=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.ssh_hook = ssh_hook self.ssh_conn_id = ssh_conn_id self.remote_host = remote_host @@ -131,12 +131,7 @@ def execute(self, context): if self.operation.lower() == SFTPOperation.GET: local_folder = os.path.dirname(self.local_filepath) if self.create_intermediate_dirs: - # Create Intermediate Directories if it doesn't exist - try: - os.makedirs(local_folder) - except OSError: - if not os.path.isdir(local_folder): - raise + Path(local_folder).mkdir(parents=True, exist_ok=True) file_msg = "from {0} to {1}".format(self.remote_filepath, self.local_filepath) self.log.info("Starting to transfer %s", file_msg) 
diff --git a/airflow/providers/sftp/sensors/sftp.py b/airflow/providers/sftp/sensors/sftp.py index 2973d5494db4c..de3e993fc726e 100644 --- a/airflow/providers/sftp/sensors/sftp.py +++ b/airflow/providers/sftp/sensors/sftp.py @@ -37,8 +37,8 @@ class SFTPSensor(BaseSensorOperator): template_fields = ('path',) @apply_defaults - def __init__(self, path, sftp_conn_id='sftp_default', *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, path, sftp_conn_id='sftp_default', **kwargs): + super().__init__(**kwargs) self.path = path self.hook = None self.sftp_conn_id = sftp_conn_id diff --git a/airflow/providers/singularity/operators/singularity.py b/airflow/providers/singularity/operators/singularity.py index 740fbc82b3321..759398ac0a28a 100644 --- a/airflow/providers/singularity/operators/singularity.py +++ b/airflow/providers/singularity/operators/singularity.py @@ -65,7 +65,7 @@ class SingularityOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments - self, + self, *, image: str, command: Union[str, List[str]], start_command: Optional[Union[str, List[str]]] = None, @@ -76,10 +76,9 @@ def __init__( # pylint: disable=too-many-arguments volumes: Optional[List[str]] = None, options: Optional[List[str]] = None, auto_remove: Optional[bool] = False, - *args, **kwargs) -> None: - super(SingularityOperator, self).__init__(*args, **kwargs) + super(SingularityOperator, self).__init__(**kwargs) self.auto_remove = auto_remove self.command = command self.start_command = start_command diff --git a/airflow/providers/slack/hooks/slack.py b/airflow/providers/slack/hooks/slack.py index d3553b203e7a5..e231d50c41d4a 100644 --- a/airflow/providers/slack/hooks/slack.py +++ b/airflow/providers/slack/hooks/slack.py @@ -68,7 +68,7 @@ def __init__( ) -> None: super().__init__() self.token = self.__get_token(token, slack_conn_id) - self.client = WebClient(token, **client_args) + self.client = WebClient(self.token, **client_args) def 
__get_token(self, token, slack_conn_id): if token is not None: diff --git a/airflow/providers/slack/operators/slack.py b/airflow/providers/slack/operators/slack.py index 5dc2b03d47dec..67d1db52e83c5 100644 --- a/airflow/providers/slack/operators/slack.py +++ b/airflow/providers/slack/operators/slack.py @@ -44,7 +44,7 @@ class SlackAPIOperator(BaseOperator): """ @apply_defaults - def __init__(self, + def __init__(self, *, slack_conn_id: Optional[str] = None, token: Optional[str] = None, method: Optional[str] = None, diff --git a/airflow/providers/slack/operators/slack_webhook.py b/airflow/providers/slack/operators/slack_webhook.py index 5e457a55f3879..2c0023e14bdc3 100644 --- a/airflow/providers/slack/operators/slack_webhook.py +++ b/airflow/providers/slack/operators/slack_webhook.py @@ -63,7 +63,7 @@ class SlackWebhookOperator(SimpleHttpOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, http_conn_id=None, webhook_token=None, message="", @@ -75,10 +75,8 @@ def __init__(self, icon_url=None, link_names=False, proxy=None, - *args, **kwargs): super().__init__(endpoint=webhook_token, - *args, **kwargs) self.http_conn_id = http_conn_id self.webhook_token = webhook_token diff --git a/airflow/providers/snowflake/example_dags/example_snowflake.py b/airflow/providers/snowflake/example_dags/example_snowflake.py index 70261019bd555..4fea24abcea6e 100644 --- a/airflow/providers/snowflake/example_dags/example_snowflake.py +++ b/airflow/providers/snowflake/example_dags/example_snowflake.py @@ -43,12 +43,12 @@ default_args = { 'owner': 'airflow', - 'start_date': days_ago(2), } dag = DAG( 'example_snowflake', default_args=default_args, + start_date=days_ago(2), tags=['example'], ) diff --git a/airflow/providers/snowflake/operators/snowflake.py b/airflow/providers/snowflake/operators/snowflake.py index f8410e65a5b49..6a7e1766de425 100644 --- a/airflow/providers/snowflake/operators/snowflake.py +++ 
b/airflow/providers/snowflake/operators/snowflake.py @@ -63,10 +63,10 @@ class SnowflakeOperator(BaseOperator): @apply_defaults def __init__( - self, sql, snowflake_conn_id='snowflake_default', parameters=None, + self, *, sql, snowflake_conn_id='snowflake_default', parameters=None, autocommit=True, warehouse=None, database=None, role=None, - schema=None, authenticator=None, *args, **kwargs): - super().__init__(*args, **kwargs) + schema=None, authenticator=None, **kwargs): + super().__init__(**kwargs) self.snowflake_conn_id = snowflake_conn_id self.sql = sql self.autocommit = autocommit diff --git a/airflow/providers/snowflake/transfers/s3_to_snowflake.py b/airflow/providers/snowflake/transfers/s3_to_snowflake.py index bbd6192345e7f..361545dfd28e9 100644 --- a/airflow/providers/snowflake/transfers/s3_to_snowflake.py +++ b/airflow/providers/snowflake/transfers/s3_to_snowflake.py @@ -47,6 +47,7 @@ class S3ToSnowflakeOperator(BaseOperator): @apply_defaults def __init__(self, + *, s3_keys, table, stage, @@ -55,8 +56,8 @@ def __init__(self, columns_array=None, autocommit=True, snowflake_conn_id='snowflake_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.s3_keys = s3_keys self.table = table self.stage = stage diff --git a/airflow/providers/snowflake/transfers/snowflake_to_slack.py b/airflow/providers/snowflake/transfers/snowflake_to_slack.py index 9714695239464..02fe167033e85 100644 --- a/airflow/providers/snowflake/transfers/snowflake_to_slack.py +++ b/airflow/providers/snowflake/transfers/snowflake_to_slack.py @@ -69,6 +69,7 @@ class SnowflakeToSlackOperator(BaseOperator): @apply_defaults def __init__( # pylint: disable=too-many-arguments self, + *, sql: str, slack_message: str, snowflake_conn_id: str = 'snowflake_default', @@ -80,9 +81,9 @@ def __init__( # pylint: disable=too-many-arguments schema: Optional[str] = None, role: Optional[str] = None, slack_token: Optional[str] = None, - *args, **kwargs + 
**kwargs ) -> None: - super(SnowflakeToSlackOperator, self).__init__(*args, **kwargs) + super(SnowflakeToSlackOperator, self).__init__(**kwargs) self.snowflake_conn_id = snowflake_conn_id self.sql = sql diff --git a/airflow/providers/sqlite/hooks/sqlite.py b/airflow/providers/sqlite/hooks/sqlite.py index b54b9ede025dc..a4ac44362090f 100644 --- a/airflow/providers/sqlite/hooks/sqlite.py +++ b/airflow/providers/sqlite/hooks/sqlite.py @@ -28,12 +28,12 @@ class SqliteHook(DbApiHook): conn_name_attr = 'sqlite_conn_id' default_conn_name = 'sqlite_default' - supports_autocommit = False - def get_conn(self): + def get_conn(self) -> sqlite3.dbapi2.Connection: """ Returns a sqlite connection object """ - conn = self.get_connection(self.sqlite_conn_id) # pylint: disable=no-member - conn = sqlite3.connect(conn.host) + conn_id = getattr(self, self.conn_name_attr) + airflow_conn = self.get_connection(conn_id) + conn = sqlite3.connect(airflow_conn.host) return conn diff --git a/airflow/providers/sqlite/operators/sqlite.py b/airflow/providers/sqlite/operators/sqlite.py index 1310b728939fd..c51b4e9e38c6d 100644 --- a/airflow/providers/sqlite/operators/sqlite.py +++ b/airflow/providers/sqlite/operators/sqlite.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from typing import Iterable, Mapping, Optional, Union +from typing import Any, Iterable, Mapping, Optional, Union from airflow.models import BaseOperator from airflow.providers.sqlite.hooks.sqlite import SqliteHook @@ -42,16 +42,17 @@ class SqliteOperator(BaseOperator): @apply_defaults def __init__( self, + *, sql: str, sqlite_conn_id: str = 'sqlite_default', parameters: Optional[Union[Mapping, Iterable]] = None, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.sqlite_conn_id = sqlite_conn_id self.sql = sql self.parameters = parameters or [] - def execute(self, context): + def execute(self, context: Mapping[Any, Any]) -> None: self.log.info('Executing: %s', self.sql) hook = SqliteHook(sqlite_conn_id=self.sqlite_conn_id) hook.run(self.sql, parameters=self.parameters) diff --git a/airflow/providers/ssh/operators/ssh.py b/airflow/providers/ssh/operators/ssh.py index 04c996f615968..80b9c34f08191 100644 --- a/airflow/providers/ssh/operators/ssh.py +++ b/airflow/providers/ssh/operators/ssh.py @@ -59,6 +59,7 @@ class SSHOperator(BaseOperator): @apply_defaults def __init__(self, + *, ssh_hook=None, ssh_conn_id=None, remote_host=None, @@ -66,9 +67,8 @@ def __init__(self, timeout=10, environment=None, get_pty=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.ssh_hook = ssh_hook self.ssh_conn_id = ssh_conn_id self.remote_host = remote_host diff --git a/airflow/providers/vertica/operators/vertica.py b/airflow/providers/vertica/operators/vertica.py index a96795099fd7c..923b1f687a3d2 100644 --- a/airflow/providers/vertica/operators/vertica.py +++ b/airflow/providers/vertica/operators/vertica.py @@ -39,10 +39,10 @@ class VerticaOperator(BaseOperator): ui_color = '#b4e0ff' @apply_defaults - def __init__(self, sql: Union[str, List[str]], + def __init__(self, *, sql: Union[str, List[str]], vertica_conn_id: str = 'vertica_default', - *args: Any, **kwargs: Any) -> 
None: - super().__init__(*args, **kwargs) + **kwargs: Any) -> None: + super().__init__(**kwargs) self.vertica_conn_id = vertica_conn_id self.sql = sql diff --git a/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py b/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py index a23156e0fb8a0..df4fa319d9955 100644 --- a/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py +++ b/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py @@ -37,13 +37,13 @@ default_args = { 'owner': 'airflow', - 'start_date': days_ago(1) } with DAG( 'example_yandexcloud_dataproc_operator', default_args=default_args, schedule_interval=None, + start_date=days_ago(1), tags=['example'], ) as dag: create_cluster = DataprocCreateClusterOperator( diff --git a/airflow/providers/yandex/operators/yandexcloud_dataproc.py b/airflow/providers/yandex/operators/yandexcloud_dataproc.py index e72a516334ed5..cfbb74c921432 100644 --- a/airflow/providers/yandex/operators/yandexcloud_dataproc.py +++ b/airflow/providers/yandex/operators/yandexcloud_dataproc.py @@ -83,6 +83,7 @@ class DataprocCreateClusterOperator(BaseOperator): # pylint: disable=too-many-locals @apply_defaults def __init__(self, + *, folder_id: Optional[str] = None, cluster_name: Optional[str] = None, cluster_description: str = '', @@ -105,9 +106,8 @@ def __init__(self, computenode_disk_type: str = 'network-ssd', computenode_count: int = 0, connection_id: Optional[str] = None, - *arguments, **kwargs): - super().__init__(*arguments, **kwargs) + super().__init__(**kwargs) self.folder_id = folder_id self.connection_id = connection_id self.cluster_name = cluster_name @@ -175,12 +175,11 @@ class DataprocDeleteClusterOperator(BaseOperator): template_fields = ['cluster_id'] @apply_defaults - def __init__(self, + def __init__(self, *, connection_id: Optional[str] = None, cluster_id: Optional[str] = None, - *arguments, **kwargs): - super().__init__(*arguments, **kwargs) + 
super().__init__(**kwargs) self.connection_id = connection_id self.cluster_id = cluster_id self.hook = None @@ -222,7 +221,7 @@ class DataprocCreateHiveJobOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, query: Optional[str] = None, query_file_uri: Optional[str] = None, script_variables: Optional[Dict[str, str]] = None, @@ -231,9 +230,8 @@ def __init__(self, name: str = 'Hive job', cluster_id: Optional[str] = None, connection_id: Optional[str] = None, - *arguments, **kwargs): - super().__init__(*arguments, **kwargs) + super().__init__(**kwargs) self.query = query self.query_file_uri = query_file_uri self.script_variables = script_variables @@ -294,7 +292,7 @@ class DataprocCreateMapReduceJobOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, main_class: Optional[str] = None, main_jar_file_uri: Optional[str] = None, jar_file_uris: Optional[Iterable[str]] = None, @@ -305,9 +303,8 @@ def __init__(self, name: str = 'Mapreduce job', cluster_id: Optional[str] = None, connection_id: Optional[str] = None, - *arguments, **kwargs): - super().__init__(*arguments, **kwargs) + super().__init__(**kwargs) self.main_class = main_class self.main_jar_file_uri = main_jar_file_uri self.jar_file_uris = jar_file_uris @@ -371,7 +368,7 @@ class DataprocCreateSparkJobOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, main_class: Optional[str] = None, main_jar_file_uri: Optional[str] = None, jar_file_uris: Optional[Iterable[str]] = None, @@ -382,9 +379,8 @@ def __init__(self, name: str = 'Spark job', cluster_id: Optional[str] = None, connection_id: Optional[str] = None, - *arguments, **kwargs): - super().__init__(*arguments, **kwargs) + super().__init__(**kwargs) self.main_class = main_class self.main_jar_file_uri = main_jar_file_uri self.jar_file_uris = jar_file_uris @@ -448,7 
+444,7 @@ class DataprocCreatePysparkJobOperator(BaseOperator): # pylint: disable=too-many-arguments @apply_defaults - def __init__(self, + def __init__(self, *, main_python_file_uri: Optional[str] = None, python_file_uris: Optional[Iterable[str]] = None, jar_file_uris: Optional[Iterable[str]] = None, @@ -459,9 +455,8 @@ def __init__(self, name: str = 'Pyspark job', cluster_id: Optional[str] = None, connection_id: Optional[str] = None, - *arguments, **kwargs): - super().__init__(*arguments, **kwargs) + super().__init__(**kwargs) self.main_python_file_uri = main_python_file_uri self.python_file_uris = python_file_uris self.jar_file_uris = jar_file_uris diff --git a/airflow/secrets/local_filesystem.py b/airflow/secrets/local_filesystem.py index 0c246afc4054e..29754d52dc2fa 100644 --- a/airflow/secrets/local_filesystem.py +++ b/airflow/secrets/local_filesystem.py @@ -28,7 +28,9 @@ import yaml -from airflow.exceptions import AirflowException, AirflowFileParseException, FileSyntaxError +from airflow.exceptions import ( + AirflowException, AirflowFileParseException, ConnectionNotUnique, FileSyntaxError, +) from airflow.secrets.base_secrets import BaseSecretsBackend from airflow.utils.file import COMMENT_PATTERN from airflow.utils.log.logging_mixin import LoggingMixin @@ -252,6 +254,10 @@ def load_connections(file_path: str): connections_by_conn_id[key].append(_create_connection(key, secret_value)) else: connections_by_conn_id[key].append(_create_connection(key, secret_values)) + + if len(connections_by_conn_id[key]) > 1: + raise ConnectionNotUnique(f"Found multiple values for {key} in {file_path}") + num_conn = sum(map(len, connections_by_conn_id.values())) log.debug("Loaded %d connections", num_conn) diff --git a/airflow/sensors/base_sensor_operator.py b/airflow/sensors/base_sensor_operator.py index 4c70b2bfe5ca6..a1e9c98a8c35c 100644 --- a/airflow/sensors/base_sensor_operator.py +++ b/airflow/sensors/base_sensor_operator.py @@ -68,15 +68,14 @@ class 
BaseSensorOperator(BaseOperator, SkipMixin): valid_modes = ['poke', 'reschedule'] # type: Iterable[str] @apply_defaults - def __init__(self, + def __init__(self, *, poke_interval: float = 60, timeout: float = 60 * 60 * 24 * 7, soft_fail: bool = False, mode: str = 'poke', exponential_backoff: bool = False, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.poke_interval = poke_interval self.soft_fail = soft_fail self.timeout = timeout @@ -122,7 +121,6 @@ def execute(self, context: Dict) -> Any: # give it a chance and fail with timeout. # This gives the ability to set up non-blocking AND soft-fail sensors. if self.soft_fail and not context['ti'].is_eligible_to_retry(): - self._do_skip_downstream_tasks(context) raise AirflowSkipException( f"Snap. Time is OUT. DAG id: {log_dag_id}") else: @@ -137,12 +135,6 @@ def execute(self, context: Dict) -> Any: try_number += 1 self.log.info("Success criteria met. Exiting.") - def _do_skip_downstream_tasks(self, context: Dict) -> None: - downstream_tasks = context['task'].get_flat_relatives(upstream=False) - self.log.debug("Downstream task_ids %s", downstream_tasks) - if downstream_tasks: - self.skip(context['dag_run'], context['ti'].execution_date, downstream_tasks) - def _get_next_poke_interval(self, started_at, try_number): """ Using the similar logic which is used for exponential backoff retry delay for operators. 
diff --git a/airflow/sensors/bash.py b/airflow/sensors/bash.py index 41837730bfb9c..711a7d40d24e8 100644 --- a/airflow/sensors/bash.py +++ b/airflow/sensors/bash.py @@ -45,12 +45,12 @@ class BashSensor(BaseSensorOperator): template_fields = ('bash_command', 'env') @apply_defaults - def __init__(self, + def __init__(self, *, bash_command, env=None, output_encoding='utf-8', - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.bash_command = bash_command self.env = env self.output_encoding = output_encoding diff --git a/airflow/sensors/date_time_sensor.py b/airflow/sensors/date_time_sensor.py index 4af3bd2b746f4..0d479ed6394f3 100644 --- a/airflow/sensors/date_time_sensor.py +++ b/airflow/sensors/date_time_sensor.py @@ -58,9 +58,9 @@ class DateTimeSensor(BaseSensorOperator): @apply_defaults def __init__( - self, target_time: Union[str, datetime.datetime], *args, **kwargs + self, *, target_time: Union[str, datetime.datetime], **kwargs ) -> None: - super().__init__(*args, **kwargs) + super().__init__(**kwargs) if isinstance(target_time, datetime.datetime): self.target_time = target_time.isoformat() elif isinstance(target_time, str): diff --git a/airflow/sensors/external_task_sensor.py b/airflow/sensors/external_task_sensor.py index cd4143b3981c0..cb45d53cd11e2 100644 --- a/airflow/sensors/external_task_sensor.py +++ b/airflow/sensors/external_task_sensor.py @@ -66,7 +66,7 @@ class ExternalTaskSensor(BaseSensorOperator): ui_color = '#19647e' @apply_defaults - def __init__(self, + def __init__(self, *, external_dag_id, external_task_id=None, allowed_states=None, @@ -74,9 +74,8 @@ def __init__(self, execution_delta=None, execution_date_fn=None, check_existence=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.allowed_states = allowed_states or [State.SUCCESS] self.failed_states = failed_states or [] @@ -91,15 +90,14 @@ def __init__(self, if external_task_id: if not total_states 
<= set(State.task_states): raise ValueError( - 'Valid values for `allowed_states` and `failed_states` ' - 'when `external_task_id` is not `None`: {}'.format(State.task_states) - ) - else: - if not total_states <= set(State.dag_states): - raise ValueError( - 'Valid values for `allowed_states` and `failed_states` ' - 'when `external_task_id` is `None`: {}'.format(State.dag_states) + f'Valid values for `allowed_states` and `failed_states` ' + f'when `external_task_id` is not `None`: {State.task_states}' ) + elif not total_states <= set(State.dag_states): + raise ValueError( + f'Valid values for `allowed_states` and `failed_states` ' + f'when `external_task_id` is `None`: {State.dag_states}' + ) if execution_delta is not None and execution_date_fn is not None: raise ValueError( @@ -140,20 +138,17 @@ def poke(self, context, session=None): ).first() if not dag_to_wait: - raise AirflowException('The external DAG ' - '{} does not exist.'.format(self.external_dag_id)) - else: - if not os.path.exists(dag_to_wait.fileloc): - raise AirflowException('The external DAG ' - '{} was deleted.'.format(self.external_dag_id)) + raise AirflowException(f'The external DAG {self.external_dag_id} does not exist.') + elif not os.path.exists(dag_to_wait.fileloc): + raise AirflowException(f'The external DAG {self.external_dag_id} was deleted.') if self.external_task_id: refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(self.external_dag_id) if not refreshed_dag_info.has_task(self.external_task_id): - raise AirflowException('The external task' - '{} in DAG {} does not exist.' - .format(self.external_task_id, - self.external_dag_id)) + raise AirflowException( + f'The external task {self.external_task_id} in ' + f'DAG {self.external_dag_id} does not exist.' 
+ ) self.has_checked_existence = True count_allowed = self.get_count(dttm_filter, session, self.allowed_states) @@ -165,11 +160,11 @@ def poke(self, context, session=None): session.commit() if count_failed == len(dttm_filter): if self.external_task_id: - raise AirflowException('The external task {} in DAG {} failed.' - .format(self.external_task_id, self.external_dag_id)) + raise AirflowException( + f'The external task {self.external_task_id} in DAG {self.external_dag_id} failed.' + ) else: - raise AirflowException('The external DAG {} failed.' - .format(self.external_dag_id)) + raise AirflowException(f'The external DAG {self.external_dag_id} failed.') return count_allowed == len(dttm_filter) @@ -217,12 +212,12 @@ def _handle_execution_date_fn(self, context): num_fxn_params = self.execution_date_fn.__code__.co_argcount if num_fxn_params == 1: return self.execution_date_fn(context['execution_date']) - elif num_fxn_params == 2: + if num_fxn_params == 2: return self.execution_date_fn(context['execution_date'], context) - else: - raise AirflowException( - 'execution_date_fn passed {} args but only allowed up to 2'.format(num_fxn_params) - ) + + raise AirflowException( + f'execution_date_fn passed {num_fxn_params} args but only allowed up to 2' + ) class ExternalTaskMarker(DummyOperator): @@ -247,14 +242,13 @@ class ExternalTaskMarker(DummyOperator): ui_color = '#19647e' @apply_defaults - def __init__(self, + def __init__(self, *, external_dag_id, external_task_id, execution_date: Optional[Union[str, datetime.datetime]] = "{{ execution_date.isoformat() }}", recursion_depth: int = 10, - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.external_dag_id = external_dag_id self.external_task_id = external_task_id if isinstance(execution_date, datetime.datetime): diff --git a/airflow/sensors/filesystem.py b/airflow/sensors/filesystem.py index 8f1446698c7e3..e8022d77d143d 100644 --- a/airflow/sensors/filesystem.py +++ 
b/airflow/sensors/filesystem.py @@ -37,18 +37,17 @@ class FileSensor(BaseSensorOperator): :type fs_conn_id: str :param filepath: File or folder name (relative to the base path set within the connection), can be a glob. - :type fs_conn_id: str + :type filepath: str """ template_fields = ('filepath',) ui_color = '#91818a' @apply_defaults - def __init__(self, + def __init__(self, *, filepath, fs_conn_id='fs_default', - *args, **kwargs): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.filepath = filepath self.fs_conn_id = fs_conn_id diff --git a/airflow/sensors/python.py b/airflow/sensors/python.py index 16621c1712916..183405188635b 100644 --- a/airflow/sensors/python.py +++ b/airflow/sensors/python.py @@ -50,13 +50,13 @@ class PythonSensor(BaseSensorOperator): @apply_defaults def __init__( - self, + self, *, python_callable: Callable, op_args: Optional[List] = None, op_kwargs: Optional[Dict] = None, templates_dict: Optional[Dict] = None, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.python_callable = python_callable self.op_args = op_args or [] self.op_kwargs = op_kwargs or {} diff --git a/airflow/sensors/sql_sensor.py b/airflow/sensors/sql_sensor.py index 2d967845d1e17..d0c72900f84a1 100644 --- a/airflow/sensors/sql_sensor.py +++ b/airflow/sensors/sql_sensor.py @@ -56,15 +56,15 @@ class SqlSensor(BaseSensorOperator): ui_color = '#7c7287' @apply_defaults - def __init__(self, conn_id, sql, parameters=None, success=None, failure=None, fail_on_empty=False, - *args, **kwargs): + def __init__(self, *, conn_id, sql, parameters=None, success=None, failure=None, fail_on_empty=False, + **kwargs): self.conn_id = conn_id self.sql = sql self.parameters = parameters self.success = success self.failure = failure self.fail_on_empty = fail_on_empty - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def _get_hook(self): conn = BaseHook.get_connection(self.conn_id) diff --git 
a/airflow/sensors/time_delta_sensor.py b/airflow/sensors/time_delta_sensor.py index 9b5b33328bd7e..4a7461f71d04a 100644 --- a/airflow/sensors/time_delta_sensor.py +++ b/airflow/sensors/time_delta_sensor.py @@ -33,8 +33,8 @@ class TimeDeltaSensor(BaseSensorOperator): """ @apply_defaults - def __init__(self, delta, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, delta, **kwargs): + super().__init__(**kwargs) self.delta = delta def poke(self, context): diff --git a/airflow/sensors/time_sensor.py b/airflow/sensors/time_sensor.py index eb6846b2957e3..179fecbe587e4 100644 --- a/airflow/sensors/time_sensor.py +++ b/airflow/sensors/time_sensor.py @@ -30,8 +30,8 @@ class TimeSensor(BaseSensorOperator): """ @apply_defaults - def __init__(self, target_time, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, target_time, **kwargs): + super().__init__(**kwargs) self.target_time = target_time def poke(self, context): diff --git a/airflow/sensors/weekday_sensor.py b/airflow/sensors/weekday_sensor.py index 90b3834e42253..0340794664587 100644 --- a/airflow/sensors/weekday_sensor.py +++ b/airflow/sensors/weekday_sensor.py @@ -73,10 +73,10 @@ class DayOfWeekSensor(BaseSensorOperator): """ @apply_defaults - def __init__(self, week_day, + def __init__(self, *, week_day, use_task_execution_day=False, - *args, **kwargs): - super().__init__(*args, **kwargs) + **kwargs): + super().__init__(**kwargs) self.week_day = week_day self.use_task_execution_day = use_task_execution_day if isinstance(self.week_day, str): diff --git a/airflow/utils/file.py b/airflow/utils/file.py index a6282d21ca361..143fc30f9c7a7 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -20,6 +20,7 @@ import os import re import zipfile +from pathlib import Path from typing import Dict, Generator, List, Optional, Pattern from airflow.configuration import conf @@ -50,14 +51,12 @@ def mkdirs(path, mode): :param mode: The mode to give to the directory e.g. 
0o755, ignores umask :type mode: int """ - try: - o_umask = os.umask(0) - os.makedirs(path, mode) - except OSError: - if not os.path.isdir(path): - raise - finally: - os.umask(o_umask) + import warnings + warnings.warn( + f"This function is deprecated. Please use `pathlib.Path({path}).mkdir`", + DeprecationWarning, stacklevel=2 + ) + Path(path).mkdir(mode=mode, parents=True, exist_ok=True) ZIP_REGEX = re.compile(r'((.*\.zip){})?(.*)'.format(re.escape(os.sep))) diff --git a/airflow/utils/log/file_processor_handler.py b/airflow/utils/log/file_processor_handler.py index 9b1f60d247166..a552c115708f2 100644 --- a/airflow/utils/log/file_processor_handler.py +++ b/airflow/utils/log/file_processor_handler.py @@ -19,6 +19,7 @@ import logging import os from datetime import datetime +from pathlib import Path from airflow import settings from airflow.utils.helpers import parse_template_string @@ -43,15 +44,7 @@ def __init__(self, base_log_folder, filename_template): parse_template_string(filename_template) self._cur_date = datetime.today() - if not os.path.exists(self._get_log_directory()): - try: - os.makedirs(self._get_log_directory()) - except OSError: - # only ignore case where the directory already exist - if not os.path.isdir(self._get_log_directory()): - raise - - logging.warning("%s already exists", self._get_log_directory()) + Path(self._get_log_directory()).mkdir(parents=True, exist_ok=True) self._symlink_latest_log_directory() @@ -137,12 +130,7 @@ def _init_file(self, filename): log_file_path = os.path.abspath(relative_log_file_path) directory = os.path.dirname(log_file_path) - if not os.path.exists(directory): - try: - os.makedirs(directory) - except OSError: - if not os.path.isdir(directory): - raise + Path(directory).mkdir(parents=True, exist_ok=True) if not os.path.exists(log_file_path): open(log_file_path, "a").close() diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index e26cf90056cee..281a1a1611296 100644 --- 
a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -18,13 +18,13 @@ """File logging handler for tasks.""" import logging import os +from pathlib import Path from typing import Optional import requests from airflow.configuration import AirflowConfigException, conf from airflow.models import TaskInstance -from airflow.utils.file import mkdirs from airflow.utils.helpers import parse_template_string @@ -223,10 +223,7 @@ def _init_file(self, ti): # operator is not compatible with impersonation (e.g. if a Celery executor is used # for a SubDag operator and the SubDag operator has a different owner than the # parent DAG) - if not os.path.exists(directory): - # Create the directory as globally writable using custom mkdirs - # as os.makedirs doesn't set mode properly. - mkdirs(directory, 0o777) + Path(directory).mkdir(mode=0o777, parents=True, exist_ok=True) if not os.path.exists(full_path): open(full_path, "a").close() diff --git a/airflow/utils/python_virtualenv.py b/airflow/utils/python_virtualenv.py index 09b6d37f52c92..ff946442b1865 100644 --- a/airflow/utils/python_virtualenv.py +++ b/airflow/utils/python_virtualenv.py @@ -19,8 +19,11 @@ """ Utilities for creating a virtual environment """ +import os from typing import List, Optional +import jinja2 + from airflow.utils.process_utils import execute_in_subprocess @@ -69,3 +72,22 @@ def prepare_virtualenv( execute_in_subprocess(pip_cmd) return '{}/bin/python'.format(venv_directory) + + +def write_python_script(jinja_context: dict, filename: str): + """ + Renders the python script to a file to execute in the virtual environment. + + :param jinja_context: The jinja context variables to unpack and replace with its placeholders in the + template file. + :type jinja_context: dict + :param filename: The name of the file to dump the rendered script to. 
+ :type filename: str + """ + template_loader = jinja2.FileSystemLoader(searchpath=os.path.dirname(__file__)) + template_env = jinja2.Environment( + loader=template_loader, + undefined=jinja2.StrictUndefined + ) + template = template_env.get_template('python_virtualenv_script.jinja2') + template.stream(**jinja_context).dump(filename) diff --git a/airflow/utils/python_virtualenv_script.jinja2 b/airflow/utils/python_virtualenv_script.jinja2 new file mode 100644 index 0000000000000..f2dd87525d807 --- /dev/null +++ b/airflow/utils/python_virtualenv_script.jinja2 @@ -0,0 +1,42 @@ +{# + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+#} + +import {{ pickling_library }} +import sys + +# Read args +{% if op_args or op_kwargs %} +with open(sys.argv[1], "rb") as file: + arg_dict = {{ pickling_library }}.load(file) +{% else %} +arg_dict = {"args": [], "kwargs": {}} +{% endif %} + +# Read string args +with open(sys.argv[3], "r") as file: + virtualenv_string_args = list(map(lambda x: x.strip(), list(file))) + +# Script +{{ python_callable_source }} +res = {{ python_callable }}(*arg_dict["args"], **arg_dict["kwargs"]) + +# Write output +with open(sys.argv[2], "wb") as file: + if res: + {{ pickling_library }}.dump(res, file) diff --git a/airflow/www/package.json b/airflow/www/package.json index b400ecd3bde69..955df82efc1c8 100644 --- a/airflow/www/package.json +++ b/airflow/www/package.json @@ -28,32 +28,32 @@ "devDependencies": { "babel": "^6.23.0", "babel-core": "^6.26.3", - "babel-eslint": "^8.2.6", - "babel-loader": "^7.1.4", + "babel-eslint": "^10.1.0", + "babel-loader": "^8.1.0", "babel-plugin-css-modules-transform": "^1.6.1", "babel-polyfill": "^6.26.0", - "clean-webpack-plugin": "^0.1.19", - "copy-webpack-plugin": "^5.1.1", + "clean-webpack-plugin": "^3.0.0", + "copy-webpack-plugin": "^6.0.3", "css-loader": "^3.4.2", - "eslint": "^4.19.1", - "eslint-config-airbnb-base": "^13.0.0", - "eslint-plugin-html": "^4.0.5", - "eslint-plugin-import": "^2.13.0", - "eslint-plugin-node": "^7.0.1", - "eslint-plugin-promise": "^3.8.0", - "eslint-plugin-standard": "^3.1.0", - "file-loader": "^1.1.11", - "handlebars": "^4.5.3", - "imports-loader": "^0.8.0", + "eslint": "^7.5.0", + "eslint-config-airbnb-base": "^14.2.0", + "eslint-plugin-html": "^6.0.2", + "eslint-plugin-import": "^2.22.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-standard": "^4.0.1", + "file-loader": "^6.0.0", + "handlebars": "^4.7.6", + "imports-loader": "^1.1.0", "jquery": ">=3.4.0", - "js-yaml": ">=3.13.1", - "lodash": "^4.17.13", - "mini-css-extract-plugin": "^0.4.1", + "js-yaml": "^3.14.0", + 
"lodash": "^4.17.19", + "mini-css-extract-plugin": "0.9.0", "moment-locales-webpack-plugin": "^1.2.0", - "style-loader": "^0.21.0", - "stylelint": "^13.3.1", + "style-loader": "^1.2.1", + "stylelint": "^13.6.1", "stylelint-config-standard": "^20.0.0", - "url-loader": "^1.0.1", + "url-loader": "4.1.0", "webpack": "^4.16.3", "webpack-cli": "^3.1.0", "webpack-manifest-plugin": "^2.2.0" @@ -64,15 +64,15 @@ "d3": "^3.4.4", "d3-tip": "^0.9.1", "dagre-d3": "^0.6.4", - "datatables.net": "^1.10.19", - "datatables.net-bs": "^1.10.19", + "datatables.net": "^1.10.21", + "datatables.net-bs": "^1.10.21", "eonasdan-bootstrap-datetimepicker": "^4.17.47", "jquery": ">=3.4.0", - "js-yaml": ">=3.13.1", - "lodash": "^4.17.13", + "js-yaml": "^3.14.0", + "lodash": "^4.17.19", "moment-timezone": "^0.5.28", "nvd3": "^1.8.6", "redoc": "^2.0.0-rc.30", - "url-search-params-polyfill": "^6.0.0" + "url-search-params-polyfill": "^8.1.0" } } diff --git a/airflow/www/templates/airflow/graph.html b/airflow/www/templates/airflow/graph.html index 8acfeee07d023..f4ec0b62c35b5 100644 --- a/airflow/www/templates/airflow/graph.html +++ b/airflow/www/templates/airflow/graph.html @@ -210,24 +210,42 @@ }); - function highlight_nodes(nodes, color) { + function highlight_nodes(nodes, color, stroke_width) { nodes.forEach (function (nodeid) { my_node = d3.select('[id="' + nodeid + '"]').node().parentNode; - d3.select(my_node).selectAll("rect").style("stroke", color) ; + d3.select(my_node) + .selectAll("rect") + .style("stroke", color) + .style("stroke-width", stroke_width) ; }) } d3.selectAll("g.node").on("mouseover", function(d){ d3.select(this).selectAll("rect").style("stroke", highlight_color) ; - highlight_nodes(g.predecessors(d), upstream_color); - highlight_nodes(g.successors(d), downstream_color) - + highlight_nodes(g.predecessors(d), upstream_color, highlightStrokeWidth); + highlight_nodes(g.successors(d), downstream_color, highlightStrokeWidth) + adjacent_node_names = [d, ...g.predecessors(d), 
...g.successors(d)] + d3.selectAll("g.nodes g.node") + .filter(x => !adjacent_node_names.includes(x)) + .style("opacity", 0.2); + adjacent_edges = g.nodeEdges(d) + d3.selectAll("g.edgePath")[0] + .filter(x => !adjacent_edges.includes(x.__data__)) + .forEach(function(x) { + d3.select(x).style('opacity', .2) + }) }); d3.selectAll("g.node").on("mouseout", function(d){ d3.select(this).selectAll("rect").style("stroke", null) ; - highlight_nodes(g.predecessors(d), null) - highlight_nodes(g.successors(d), null) + highlight_nodes(g.predecessors(d), null, initialStrokeWidth) + highlight_nodes(g.successors(d), null, initialStrokeWidth) + d3.selectAll("g.node") + .style("opacity", 1); + d3.selectAll("g.node rect") + .style("stroke-width", initialStrokeWidth); + d3.selectAll("g.edgePath") + .style("opacity", 1); }); diff --git a/airflow/www/webpack.config.js b/airflow/www/webpack.config.js index 9acbe9f224ba9..98bcd04c09b03 100644 --- a/airflow/www/webpack.config.js +++ b/airflow/www/webpack.config.js @@ -19,7 +19,7 @@ const webpack = require('webpack'); const path = require('path'); const ManifestPlugin = require('webpack-manifest-plugin'); -const CleanWebpackPlugin = require('clean-webpack-plugin'); +const cwplg = require('clean-webpack-plugin'); const CopyWebpackPlugin = require('copy-webpack-plugin'); const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const MomentLocalesPlugin = require('moment-locales-webpack-plugin'); @@ -58,12 +58,10 @@ const config = { ], }, module: { - rules: [ - { + rules: [{ test: /datatables\.net.*/, loader: 'imports-loader?define=>false', - }, - { + }, { test: /\.jsx?$/, exclude: /node_modules/, loader: 'babel-loader', @@ -72,30 +70,36 @@ const config = { { test: /\.css$/, include: STATIC_DIR, - use: [ - MiniCssExtractPlugin.loader, + use: [{ + loader: MiniCssExtractPlugin.loader, + options: { + esModule: true, + }, + }, 'css-loader', ], }, /* for css linking images */ { - test: /\.png$/, - loader: 'url-loader?limit=100000', - }, - { 
- test: /\.jpg$/, - loader: 'file-loader', - }, - { - test: /\.gif$/, - loader: 'file-loader', + test: /\.(png|jpg|gif)$/i, + use: [{ + loader: 'url-loader', + options: { + limit: 100000, + }, + }, ], }, /* for font-awesome */ { test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/, - loader: 'url-loader?limit=10000&mimetype=application/font-woff', - }, - { + use: [{ + loader: 'url-loader', + options: { + limit: 100000, + mimetype: 'application/font-woff', + }, + }, ], + }, { test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: 'file-loader', }, @@ -103,8 +107,12 @@ const config = { }, plugins: [ new ManifestPlugin(), - new CleanWebpackPlugin(['static/dist']), - new MiniCssExtractPlugin({ filename: '[name].[chunkhash].css' }), + new cwplg.CleanWebpackPlugin({ + verbose: true, + }), + new MiniCssExtractPlugin({ + filename: '[name].[chunkhash].css' + }), // MomentJS loads all the locale, making it a huge JS file. // This will ignore the locales from momentJS @@ -118,23 +126,46 @@ const config = { // Since we have all the dependencies separated from hard-coded JS within HTML, // this seems like an efficient solution for now. 
Will update that once // we'll have the dependencies imported within the custom JS - new CopyWebpackPlugin([ - { from: 'node_modules/nvd3/build/*.min.*', flatten: true }, - // Update this when upgrade d3 package, as the path in new D3 is different - { from: 'node_modules/d3/d3.min.*', flatten: true }, - { from: 'node_modules/dagre-d3/dist/*.min.*', flatten: true }, - { from: 'node_modules/d3-tip/dist/index.js', to: 'd3-tip.js', flatten: true }, - { from: 'node_modules/bootstrap-3-typeahead/*min.*', flatten: true }, - { - from: 'node_modules/bootstrap-toggle/**/*bootstrap-toggle.min.*', - flatten: true, - }, - { from: 'node_modules/datatables.net/**/**.min.*', flatten: true }, - { from: 'node_modules/datatables.net-bs/**/**.min.*', flatten: true }, - { from: 'node_modules/eonasdan-bootstrap-datetimepicker/build/css/bootstrap-datetimepicker.min.css', flatten: true }, - { from: 'node_modules/eonasdan-bootstrap-datetimepicker/build/js/bootstrap-datetimepicker.min.js', flatten: true }, - { from: 'node_modules/redoc/bundles/redoc.standalone.*', flatten: true }, - ], { copyUnmodified: true }), + new CopyWebpackPlugin({ + patterns: [{ + from: 'node_modules/nvd3/build/*.min.*', + flatten: true + }, + // Update this when upgrade d3 package, as the path in new D3 is different + { + from: 'node_modules/d3/d3.min.*', + flatten: true + }, { + from: 'node_modules/dagre-d3/dist/*.min.*', + flatten: true + }, { + from: 'node_modules/d3-tip/dist/index.js', + to: 'd3-tip.js', + flatten: true + }, { + from: 'node_modules/bootstrap-3-typeahead/*min.*', + flatten: true + }, { + from: 'node_modules/bootstrap-toggle/**/*bootstrap-toggle.min.*', + flatten: true, + }, { + from: 'node_modules/datatables.net/**/**.min.*', + flatten: true + }, { + from: 'node_modules/datatables.net-bs/**/**.min.*', + flatten: true + }, { + from: 'node_modules/eonasdan-bootstrap-datetimepicker/build/css/bootstrap-datetimepicker.min.css', + flatten: true + }, { + from: 
'node_modules/eonasdan-bootstrap-datetimepicker/build/js/bootstrap-datetimepicker.min.js', + flatten: true + }, { + from: 'node_modules/redoc/bundles/redoc.standalone.*', + flatten: true + }, + ], + }), ], }; diff --git a/airflow/www/yarn.lock b/airflow/www/yarn.lock index 9e30186908272..760afd0469ed0 100644 --- a/airflow/www/yarn.lock +++ b/airflow/www/yarn.lock @@ -2,14 +2,14 @@ # yarn lockfile v1 -"@babel/code-frame@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0-beta.44.tgz#2a02643368de80916162be70865c97774f3adbd9" - integrity sha512-cuAuTTIQ9RqcFRJ/Y8PvTh+paepNcaGxwQwjIDRWPXmzzyAeCO4KqS9ikMvq0MCbRk6GlYKwfzStrcP3/jSL8g== +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a" + integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== dependencies: - "@babel/highlight" "7.0.0-beta.44" + "@babel/highlight" "^7.10.4" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.8.3": +"@babel/code-frame@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.8.3.tgz#33e25903d7481181534e12ec0a25f16b6fcf419e" integrity sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g== @@ -38,16 +38,14 @@ semver "^5.4.1" source-map "^0.5.0" -"@babel/generator@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.0.0-beta.44.tgz#c7e67b9b5284afcf69b309b50d7d37f3e5033d42" - integrity sha512-5xVb7hlhjGcdkKpMXgicAVgx8syK5VJz193k0i/0sLP6DzE6lRrU1K3B/rFefgdo9LPGMAOOOAWW4jycj07ShQ== +"@babel/generator@^7.10.5": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.10.5.tgz#1b903554bc8c583ee8d25f1e8969732e6b829a69" + integrity 
sha512-3vXxr3FEW7E7lJZiWQ3bM4+v/Vyr9C+hpolQ8BGFr9Y8Ri2tFLWTixmwKBafDujO1WVah4fhZBeU1bieKdghig== dependencies: - "@babel/types" "7.0.0-beta.44" + "@babel/types" "^7.10.5" jsesc "^2.5.1" - lodash "^4.2.0" source-map "^0.5.0" - trim-right "^1.0.1" "@babel/generator@^7.9.0", "@babel/generator@^7.9.5": version "7.9.5" @@ -59,14 +57,14 @@ lodash "^4.17.13" source-map "^0.5.0" -"@babel/helper-function-name@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.0.0-beta.44.tgz#e18552aaae2231100a6e485e03854bc3532d44dd" - integrity sha512-MHRG2qZMKMFaBavX0LWpfZ2e+hLloT++N7rfM3DYOMUOGCD8cVjqZpwiL8a0bOX3IYcQev1ruciT0gdFFRTxzg== +"@babel/helper-function-name@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.10.4.tgz#d2d3b20c59ad8c47112fa7d2a94bc09d5ef82f1a" + integrity sha512-YdaSyz1n8gY44EmN7x44zBn9zQ1Ry2Y+3GTA+3vH6Mizke1Vw0aWDM66FOYEPw8//qKkmqOckrGgTYa+6sceqQ== dependencies: - "@babel/helper-get-function-arity" "7.0.0-beta.44" - "@babel/template" "7.0.0-beta.44" - "@babel/types" "7.0.0-beta.44" + "@babel/helper-get-function-arity" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/types" "^7.10.4" "@babel/helper-function-name@^7.9.5": version "7.9.5" @@ -77,12 +75,12 @@ "@babel/template" "^7.8.3" "@babel/types" "^7.9.5" -"@babel/helper-get-function-arity@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0-beta.44.tgz#d03ca6dd2b9f7b0b1e6b32c56c72836140db3a15" - integrity sha512-w0YjWVwrM2HwP6/H3sEgrSQdkCaxppqFeJtAnB23pRiJB5E/O9Yp7JAAeWBl+gGEgmBFinnTyOv2RN7rcSmMiw== +"@babel/helper-get-function-arity@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.10.4.tgz#98c1cbea0e2332f33f9a4661b8ce1505b2c19ba2" + integrity 
sha512-EkN3YDB+SRDgiIUnNgcmiD361ti+AVbL3f3Henf6dqqUyr5dMsorno0lJWJuLhDhkI5sYEpgj6y9kB8AOU1I2A== dependencies: - "@babel/types" "7.0.0-beta.44" + "@babel/types" "^7.10.4" "@babel/helper-get-function-arity@^7.8.3": version "7.8.3" @@ -98,6 +96,13 @@ dependencies: "@babel/types" "^7.8.3" +"@babel/helper-module-imports@^7.0.0": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.10.4.tgz#4c5c54be04bd31670a7382797d75b9fa2e5b5620" + integrity sha512-nEQJHqYavI217oD9+s5MUBzk6x1IlvoS9WTPfgG43CbMEeStE0v+r+TucWdx8KFGowPGvyOkDT9+7DHedIDnVw== + dependencies: + "@babel/types" "^7.10.4" + "@babel/helper-module-imports@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz#7fe39589b39c016331b6b8c3f441e8f0b1419498" @@ -143,12 +148,12 @@ "@babel/template" "^7.8.3" "@babel/types" "^7.8.3" -"@babel/helper-split-export-declaration@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.0.0-beta.44.tgz#c0b351735e0fbcb3822c8ad8db4e583b05ebd9dc" - integrity sha512-aQ7QowtkgKKzPGf0j6u77kBMdUFVBKNHw2p/3HX/POt5/oz8ec5cs0GwlgM8Hz7ui5EwJnzyfRmkNF1Nx1N7aA== +"@babel/helper-split-export-declaration@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.10.4.tgz#2c70576eaa3b5609b24cb99db2888cc3fc4251d1" + integrity sha512-pySBTeoUff56fL5CBU2hWm9TesA4r/rOkI9DyJLvvgz09MB9YtfIYe3iBriVaYNaPe+Alua0vBIOVOLs2buWhg== dependencies: - "@babel/types" "7.0.0-beta.44" + "@babel/types" "^7.10.4" "@babel/helper-split-export-declaration@^7.8.3": version "7.8.3" @@ -157,7 +162,12 @@ dependencies: "@babel/types" "^7.8.3" -"@babel/helper-validator-identifier@^7.9.0", "@babel/helper-validator-identifier@^7.9.5": +"@babel/helper-validator-identifier@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz#a78c7a7251e01f616512d31b10adcf52ada5e0d2" + integrity sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw== + +"@babel/helper-validator-identifier@^7.9.5": version "7.9.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.9.5.tgz#90977a8e6fbf6b431a7dc31752eee233bf052d80" integrity sha512-/8arLKUFq882w4tWGj9JYzRpAlZgiWUJ+dtteNTDqrRBz9Iguck9Rn3ykuBDoUwh2TO4tSAJlrxDUOXWklJe4g== @@ -171,33 +181,29 @@ "@babel/traverse" "^7.9.0" "@babel/types" "^7.9.0" -"@babel/highlight@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.0.0-beta.44.tgz#18c94ce543916a80553edcdcf681890b200747d5" - integrity sha512-Il19yJvy7vMFm8AVAh6OZzaFoAd0hbkeMZiX3P5HGD+z7dyI7RzndHB0dg6Urh/VAFfHtpOIzDUSxmY6coyZWQ== - dependencies: - chalk "^2.0.0" - esutils "^2.0.2" - js-tokens "^3.0.0" - -"@babel/highlight@^7.8.3": - version "7.9.0" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.9.0.tgz#4e9b45ccb82b79607271b2979ad82c7b68163079" - integrity sha512-lJZPilxX7Op3Nv/2cvFdnlepPXDxi29wxteT57Q965oc5R9v86ztx0jfxVrTcBk8C2kcPkkDa2Z4T3ZsPPVWsQ== +"@babel/highlight@^7.10.4", "@babel/highlight@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.4.tgz#7d1bdfd65753538fabe6c38596cdb76d9ac60143" + integrity sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA== dependencies: - "@babel/helper-validator-identifier" "^7.9.0" + "@babel/helper-validator-identifier" "^7.10.4" chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/parser@^7.10.4", "@babel/parser@^7.10.5", "@babel/parser@^7.7.0": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.10.5.tgz#e7c6bf5a7deff957cec9f04b551e2762909d826b" + integrity 
sha512-wfryxy4bE1UivvQKSQDU4/X6dr+i8bctjUjj8Zyt3DQy7NtPizJXT8M52nqpNKL+nq2PW8lxk4ZqLj0fD4B4hQ== + "@babel/parser@^7.8.6", "@babel/parser@^7.9.0": version "7.9.4" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.9.4.tgz#68a35e6b0319bbc014465be43828300113f2f2e8" integrity sha512-bC49otXX6N0/VYhgOMh4gnP26E9xnDZK3TmbNpxYzzz9BQLBosQwfyOe9/cXUU3txYhTzLCbcqd5c8y/OmCjHA== -"@babel/runtime@^7.0.0", "@babel/runtime@^7.9.2": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.3.tgz#670d002655a7c366540c67f6fd3342cd09500364" - integrity sha512-RzGO0RLSdokm9Ipe/YD+7ww8X2Ro79qiXZF3HU9ljrM+qnJmH1Vqth+hbiQZy761LnMJTMitHDuKVYTk3k4dLw== +"@babel/runtime@^7.0.0", "@babel/runtime@^7.7.2", "@babel/runtime@^7.9.2": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.5.tgz#303d8bd440ecd5a491eae6117fd3367698674c5c" + integrity sha512-otddXKhdNn7d0ptoFRHtMLa8LqDxLYwTjB4nYgM1yy5N6gU/MUf8zqyyLltCH3yAVitBzmwK4us+DD0l/MauAg== dependencies: regenerator-runtime "^0.13.4" @@ -208,15 +214,14 @@ dependencies: regenerator-runtime "^0.13.4" -"@babel/template@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.0.0-beta.44.tgz#f8832f4fdcee5d59bf515e595fc5106c529b394f" - integrity sha512-w750Sloq0UNifLx1rUqwfbnC6uSUk0mfwwgGRfdLiaUzfAOiH0tHJE6ILQIUi3KYkjiCDTskoIsnfqZvWLBDng== +"@babel/template@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.10.4.tgz#3251996c4200ebc71d1a8fc405fba940f36ba278" + integrity sha512-ZCjD27cGJFUB6nmCB1Enki3r+L5kJveX9pq1SvAUKoICy6CZ9yD8xO086YXdYhvNjBdnekm4ZnaP5yC8Cs/1tA== dependencies: - "@babel/code-frame" "7.0.0-beta.44" - "@babel/types" "7.0.0-beta.44" - babylon "7.0.0-beta.44" - lodash "^4.2.0" + "@babel/code-frame" "^7.10.4" + "@babel/parser" "^7.10.4" + "@babel/types" "^7.10.4" "@babel/template@^7.8.3", "@babel/template@^7.8.6": version "7.8.6" @@ -227,21 +232,20 @@ 
"@babel/parser" "^7.8.6" "@babel/types" "^7.8.6" -"@babel/traverse@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.0.0-beta.44.tgz#a970a2c45477ad18017e2e465a0606feee0d2966" - integrity sha512-UHuDz8ukQkJCDASKHf+oDt3FVUzFd+QYfuBIsiNu/4+/ix6pP/C+uQZJ6K1oEfbCMv/IKWbgDEh7fcsnIE5AtA== - dependencies: - "@babel/code-frame" "7.0.0-beta.44" - "@babel/generator" "7.0.0-beta.44" - "@babel/helper-function-name" "7.0.0-beta.44" - "@babel/helper-split-export-declaration" "7.0.0-beta.44" - "@babel/types" "7.0.0-beta.44" - babylon "7.0.0-beta.44" - debug "^3.1.0" +"@babel/traverse@^7.7.0": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.10.5.tgz#77ce464f5b258be265af618d8fddf0536f20b564" + integrity sha512-yc/fyv2gUjPqzTz0WHeRJH2pv7jA9kA7mBX2tXl/x5iOE81uaVPuGPtaYk7wmkx4b67mQ7NqI8rmT2pF47KYKQ== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.10.5" + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.10.4" + "@babel/parser" "^7.10.5" + "@babel/types" "^7.10.5" + debug "^4.1.0" globals "^11.1.0" - invariant "^2.2.0" - lodash "^4.2.0" + lodash "^4.17.19" "@babel/traverse@^7.8.6", "@babel/traverse@^7.9.0": version "7.9.5" @@ -258,13 +262,13 @@ globals "^11.1.0" lodash "^4.17.13" -"@babel/types@7.0.0-beta.44": - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.0.0-beta.44.tgz#6b1b164591f77dec0a0342aca995f2d046b3a757" - integrity sha512-5eTV4WRmqbaFM3v9gHAIljEQJU4Ssc6fxL61JN+Oe2ga/BwyjzjamwkCVVAQjHGuAX8i0BWo42dshL8eO5KfLQ== +"@babel/types@^7.10.4", "@babel/types@^7.10.5", "@babel/types@^7.7.0": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.10.5.tgz#d88ae7e2fde86bfbfe851d4d81afa70a997b5d15" + integrity sha512-ixV66KWfCI6GKoA/2H9v6bQdbfXEwwpOdQ8cRvb4F+eyvhlaHxWFMQB4+3d9QFJXZsiiiqVrewNV0DFEQpyT4Q== dependencies: - esutils "^2.0.2" - lodash "^4.2.0" + 
"@babel/helper-validator-identifier" "^7.10.4" + lodash "^4.17.19" to-fast-properties "^2.0.0" "@babel/types@^7.8.3", "@babel/types@^7.8.6", "@babel/types@^7.9.0", "@babel/types@^7.9.5": @@ -276,6 +280,53 @@ lodash "^4.17.13" to-fast-properties "^2.0.0" +"@emotion/babel-utils@^0.6.4": + version "0.6.10" + resolved "https://registry.yarnpkg.com/@emotion/babel-utils/-/babel-utils-0.6.10.tgz#83dbf3dfa933fae9fc566e54fbb45f14674c6ccc" + integrity sha512-/fnkM/LTEp3jKe++T0KyTszVGWNKPNOUJfjNKLO17BzQ6QPxgbg3whayom1Qr2oLFH3V92tDymU+dT5q676uow== + dependencies: + "@emotion/hash" "^0.6.6" + "@emotion/memoize" "^0.6.6" + "@emotion/serialize" "^0.9.1" + convert-source-map "^1.5.1" + find-root "^1.1.0" + source-map "^0.7.2" + +"@emotion/hash@^0.6.2", "@emotion/hash@^0.6.6": + version "0.6.6" + resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.6.6.tgz#62266c5f0eac6941fece302abad69f2ee7e25e44" + integrity sha512-ojhgxzUHZ7am3D2jHkMzPpsBAiB005GF5YU4ea+8DNPybMk01JJUM9V9YRlF/GE95tcOm8DxQvWA2jq19bGalQ== + +"@emotion/memoize@^0.6.1", "@emotion/memoize@^0.6.6": + version "0.6.6" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.6.6.tgz#004b98298d04c7ca3b4f50ca2035d4f60d2eed1b" + integrity sha512-h4t4jFjtm1YV7UirAFuSuFGyLa+NNxjdkq6DpFLANNQY5rHueFZHVY+8Cu1HYVP6DrheB0kv4m5xPjo7eKT7yQ== + +"@emotion/serialize@^0.9.1": + version "0.9.1" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-0.9.1.tgz#a494982a6920730dba6303eb018220a2b629c145" + integrity sha512-zTuAFtyPvCctHBEL8KZ5lJuwBanGSutFEncqLn/m9T1a6a93smBStK+bZzcNPgj4QS8Rkw9VTwJGhRIUVO8zsQ== + dependencies: + "@emotion/hash" "^0.6.6" + "@emotion/memoize" "^0.6.6" + "@emotion/unitless" "^0.6.7" + "@emotion/utils" "^0.8.2" + +"@emotion/stylis@^0.7.0": + version "0.7.1" + resolved "https://registry.yarnpkg.com/@emotion/stylis/-/stylis-0.7.1.tgz#50f63225e712d99e2b2b39c19c70fff023793ca5" + integrity 
sha512-/SLmSIkN13M//53TtNxgxo57mcJk/UJIDFRKwOiLIBEyBHEcipgR6hNMQ/59Sl4VjCJ0Z/3zeAZyvnSLPG/1HQ== + +"@emotion/unitless@^0.6.2", "@emotion/unitless@^0.6.7": + version "0.6.7" + resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.6.7.tgz#53e9f1892f725b194d5e6a1684a7b394df592397" + integrity sha512-Arj1hncvEVqQ2p7Ega08uHLr1JuRYBuO5cIvcA+WWEQ5+VmkOE3ZXzl04NbQxeQpWX78G7u6MqxKuNX3wvYZxg== + +"@emotion/utils@^0.8.2": + version "0.8.2" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-0.8.2.tgz#576ff7fb1230185b619a75d258cbc98f0867a8dc" + integrity sha512-rLu3wcBWH4P5q1CGoSSH/i9hrXs7SlbRLkoq9IGuoPYNGQvDJ3pt/wmOM+XgYjIDRMVIdkUWt0RsfzF50JfnCw== + "@nodelib/fs.scandir@2.1.3": version "2.1.3" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz#3a582bdb53804c6ba6d146579c46e52130cf4a3b" @@ -297,10 +348,17 @@ "@nodelib/fs.scandir" "2.1.3" fastq "^1.6.0" -"@stylelint/postcss-css-in-js@^0.37.0": - version "0.37.0" - resolved "https://registry.yarnpkg.com/@stylelint/postcss-css-in-js/-/postcss-css-in-js-0.37.0.tgz#199df63fe8d876d74656c8dec3b17e8474ca866e" - integrity sha512-9svhg0rpUOo4xkvnllUzM0ZQF/Iwxhi6Bf1rzOA06fDa+fjnBUb2mvEV1c9nJb14g1XD/HMSmvklaVyCo96x6A== +"@npmcli/move-file@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-1.0.1.tgz#de103070dac0f48ce49cf6693c23af59c0f70464" + integrity sha512-Uv6h1sT+0DrblvIrolFtbvM1FgWm+/sy4B3pvLp67Zys+thcukzS5ekn7HsZFGpWP4Q3fYJCljbWQE/XivMRLw== + dependencies: + mkdirp "^1.0.4" + +"@stylelint/postcss-css-in-js@^0.37.1": + version "0.37.2" + resolved "https://registry.yarnpkg.com/@stylelint/postcss-css-in-js/-/postcss-css-in-js-0.37.2.tgz#7e5a84ad181f4234a2480803422a47b8749af3d2" + integrity sha512-nEhsFoJurt8oUmieT8qy4nk81WRHmJynmVwn/Vts08PL9fhgIsMhk1GId5yAN643OzqEEb5S/6At2TZW7pqPDA== dependencies: "@babel/core" ">=7.9.0" @@ -312,20 +370,53 @@ remark "^12.0.0" unist-util-find-all-after "^3.0.1" +"@types/anymatch@*": + version "1.3.1" 
+ resolved "https://registry.yarnpkg.com/@types/anymatch/-/anymatch-1.3.1.tgz#336badc1beecb9dacc38bea2cf32adf627a8421a" + integrity sha512-/+CRPXpBDpo2RK9C68N3b2cOvO0Cf5B9aPijHsoDQTHivnGSObdOF2BRQOYjojWTDy6nQvMjmqRXIxH55VjxxA== + "@types/color-name@^1.1.1": version "1.1.1" resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0" integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ== +"@types/glob@^7.1.1": + version "7.1.3" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.3.tgz#e6ba80f36b7daad2c685acd9266382e68985c183" + integrity sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w== + dependencies: + "@types/minimatch" "*" + "@types/node" "*" + +"@types/json-schema@^7.0.4": + version "7.0.5" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.5.tgz#dcce4430e64b443ba8945f0290fb564ad5bac6dd" + integrity sha512-7+2BITlgjgDhH0vvwZU/HZJVyk+2XUlvxXe8dFMedNX/aMkaOq++rMAFXc0tM7ij15QaWlbdQASBR9dihi+bDQ== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= + +"@types/minimatch@*": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" + integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== + "@types/minimist@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.0.tgz#69a23a3ad29caf0097f06eda59b361ee2f0639f6" integrity sha1-aaI6OtKcrwCX8G7aWbNh7i8GOfY= +"@types/node@*": + version "14.0.24" + resolved "https://registry.yarnpkg.com/@types/node/-/node-14.0.24.tgz#b0f86f58564fa02a28b68f8b55d4cdec42e3b9d6" + integrity 
sha512-btt/oNOiDWcSuI721MdL8VQGnjsKjlTMdrKyTcLCKeQp/n4AAMFJ961wMbp+09y8WuGPClDEv07RIItdXKIXAA== + "@types/node@^13.11.1": - version "13.13.12" - resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.12.tgz#9c72e865380a7dc99999ea0ef20fc9635b503d20" - integrity sha512-zWz/8NEPxoXNT9YyF2osqyA9WjssZukYpgI4UYZpOjcyqwIUqWGkcCionaEb9Ki+FULyPyvNFpg/329Kd2/pbw== + version "13.13.14" + resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.14.tgz#20cd7d2a98f0c3b08d379f4ea9e6b315d2019529" + integrity sha512-Az3QsOt1U/K1pbCQ0TXGELTuTkPLOiFIQf3ILzbOyo0FqgV9SxRnxbxM5QlAveERZMHpZY+7u3Jz2tKyl+yg6g== "@types/normalize-package-data@^2.4.0": version "2.4.0" @@ -337,11 +428,49 @@ resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== +"@types/source-list-map@*": + version "0.1.2" + resolved "https://registry.yarnpkg.com/@types/source-list-map/-/source-list-map-0.1.2.tgz#0078836063ffaf17412349bba364087e0ac02ec9" + integrity sha512-K5K+yml8LTo9bWJI/rECfIPrGgxdpeNbj+d53lwN4QjW1MCwlkhUms+gtdzigTeUyBr09+u8BwOIY3MXvHdcsA== + +"@types/tapable@*": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/tapable/-/tapable-1.0.6.tgz#a9ca4b70a18b270ccb2bc0aaafefd1d486b7ea74" + integrity sha512-W+bw9ds02rAQaMvaLYxAbJ6cvguW/iJXNT6lTssS1ps6QdrMKttqEAMEG/b5CR8TZl3/L7/lH0ZV5nNR1LXikA== + +"@types/uglify-js@*": + version "3.9.3" + resolved "https://registry.yarnpkg.com/@types/uglify-js/-/uglify-js-3.9.3.tgz#d94ed608e295bc5424c9600e6b8565407b6b4b6b" + integrity sha512-KswB5C7Kwduwjj04Ykz+AjvPcfgv/37Za24O2EDzYNbwyzOo8+ydtvzUfZ5UMguiVu29Gx44l1A6VsPPcmYu9w== + dependencies: + source-map "^0.6.1" + "@types/unist@^2.0.0", "@types/unist@^2.0.2": version "2.0.3" resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e" integrity 
sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ== +"@types/webpack-sources@*": + version "1.4.0" + resolved "https://registry.yarnpkg.com/@types/webpack-sources/-/webpack-sources-1.4.0.tgz#e58f1f05f87d39a5c64cf85705bdbdbb94d4d57e" + integrity sha512-c88dKrpSle9BtTqR6ifdaxu1Lvjsl3C5OsfvuUbUwdXymshv1TkufUAXBajCCUM/f/TmnkZC/Esb03MinzSiXQ== + dependencies: + "@types/node" "*" + "@types/source-list-map" "*" + source-map "^0.7.3" + +"@types/webpack@^4.4.31": + version "4.41.21" + resolved "https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.21.tgz#cc685b332c33f153bb2f5fc1fa3ac8adeb592dee" + integrity sha512-2j9WVnNrr/8PLAB5csW44xzQSJwS26aOnICsP3pSGCEdsu6KYtfQ6QJsVUKHWRnm1bL7HziJsfh5fHqth87yKA== + dependencies: + "@types/anymatch" "*" + "@types/node" "*" + "@types/tapable" "*" + "@types/uglify-js" "*" + "@types/webpack-sources" "*" + source-map "^0.6.0" + "@webassemblyjs/ast@1.9.0": version "1.9.0" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.9.0.tgz#bd850604b4042459a5a41cd7d338cbed695ed964" @@ -497,44 +626,45 @@ resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== -acorn-jsx@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b" - integrity sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s= - dependencies: - acorn "^3.0.4" - -acorn@^3.0.4: - version "3.3.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" - integrity sha1-ReN/s56No/JbruP/U2niu18iAXo= +abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== -acorn@^5.5.0: - 
version "5.7.3" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.3.tgz#67aa231bf8812974b85235a96771eb6bd07ea279" - integrity sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw== +acorn-jsx@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.2.0.tgz#4c66069173d6fdd68ed85239fc256226182b2ebe" + integrity sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ== -acorn@^6.2.1: +acorn@^6.4.1: version "6.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.1.tgz#531e58ba3f51b9dacb9a6646ca4debf5b14ca474" integrity sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA== +acorn@^7.3.1: + version "7.3.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.3.1.tgz#85010754db53c3fbaf3b9ea3e083aa5c5d147ffd" + integrity sha512-tLc0wSnatxAQHVHUapaHdz72pi9KUyHjq5KyHjGg9Y8Ifdc79pTh2XvI6I1/chZbnM7QtNKzh66ooDogPZSleA== + +aggregate-error@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.0.1.tgz#db2fe7246e536f40d9b5442a39e117d7dd6a24e0" + integrity sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== -ajv-keywords@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.1.tgz#617997fc5f60576894c435f940d819e135b80762" - integrity sha1-YXmX/F9gV2iUxDX5QNgZ4TW4B2I= - ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" - integrity 
sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== + version "3.5.1" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.1.tgz#b83ca89c5d42d69031f424cad49aada0236c6957" + integrity sha512-KWcq3xN8fDjSB+IMoh2VaXVhRI0BBGxoYp3rx7Pkb6z0cFjYR9Q9l4yZqqals0/zsioCmocC5H6UvsGD4MoIBA== -ajv@^5.2.3, ajv@^5.3.0, ajv@^5.5.2: +ajv@^5.5.2: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= @@ -544,25 +674,20 @@ ajv@^5.2.3, ajv@^5.3.0, ajv@^5.5.2: fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.3.0" -ajv@^6.1.0, ajv@^6.10.2: - version "6.10.2" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" - integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== +ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.2: + version "6.12.3" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.3.tgz#18c5af38a111ddeb4f2697bd78d68abc1cabd706" + integrity sha512-4K0cK3L1hsqk9xIb2z9vs/XU+PGJZ9PNpJRDS9YLzmNdX6jmVPfamLvTJr0aDAusnHyCHO6MjzlkAsgtqp9teA== dependencies: - fast-deep-equal "^2.0.1" + fast-deep-equal "^3.1.1" fast-json-stable-stringify "^2.0.0" json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ansi-colors@^3.0.0: - version "3.2.4" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" - integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== - -ansi-escapes@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" - integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== +ansi-colors@^4.1.1: + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" + integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== ansi-regex@^2.0.0: version "2.1.1" @@ -612,6 +737,14 @@ anymatch@^2.0.0: micromatch "^3.1.4" normalize-path "^2.1.1" +anymatch@~3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" + integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + aproba@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" @@ -639,13 +772,14 @@ arr-union@^3.1.0: resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= -array-includes@^3.0.3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.0.tgz#48a929ef4c6bb1fa6dc4a92c9b023a261b0ca404" - integrity sha512-ONOEQoKrvXPKk7Su92Co0YMqYO32FfqJTzkKU9u2UpIXyYZIzLSvpdg4AwvSw4mSUW0czu6inK+zby6Oj6gDjQ== +array-includes@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.1.tgz#cdd67e6852bdf9c1215460786732255ed2459348" + integrity sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ== dependencies: define-properties "^1.1.3" - es-abstract "^1.17.0-next.0" + es-abstract "^1.17.0" + is-string "^1.0.5" array-union@^1.0.1: version "1.0.2" @@ -669,7 +803,7 @@ array-unique@^0.3.2: resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= -array.prototype.flat@^1.2.1: +array.prototype.flat@^1.2.3: version "1.2.3" resolved 
"https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.3.tgz#0de82b426b0318dbfdb940089e38b043d37f6c7b" integrity sha512-gBlRZV0VSmfPIeWfuuy56XZMvbVfbEUnOXUvt3F/eUUUSyzlgLxhEX4YAEpxNAogRGehPSnfXyPtYyKAhkzQhQ== @@ -682,6 +816,11 @@ arrify@^1.0.1: resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= +arrify@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" + integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== + asn1.js@^4.0.0: version "4.10.1" resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" @@ -714,25 +853,25 @@ async-each@^1.0.1: resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== -atob@^2.1.1: +atob@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== -autoprefixer@^9.7.5: - version "9.7.6" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.6.tgz#63ac5bbc0ce7934e6997207d5bb00d68fa8293a4" - integrity sha512-F7cYpbN7uVVhACZTeeIeealwdGM6wMtfWARVLTy5xmKtgVdBNJvbDRoCK3YO1orcs7gv/KwYlb3iXwu9Ug9BkQ== +autoprefixer@^9.8.0: + version "9.8.5" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.8.5.tgz#2c225de229ddafe1d1424c02791d0c3e10ccccaa" + integrity sha512-C2p5KkumJlsTHoNv9w31NrBRgXhf6eCMteJuHZi2xhkgC+5Vm40MEtCKPhc0qdgAOhox0YPy1SQHTAky05UoKg== dependencies: - browserslist "^4.11.1" - caniuse-lite "^1.0.30001039" - chalk "^2.4.2" + browserslist "^4.12.0" + caniuse-lite 
"^1.0.30001097" + colorette "^1.2.0" normalize-range "^0.1.2" num2fraction "^1.2.2" - postcss "^7.0.27" - postcss-value-parser "^4.0.3" + postcss "^7.0.32" + postcss-value-parser "^4.1.0" -babel-code-frame@^6.22.0, babel-code-frame@^6.26.0: +babel-code-frame@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= @@ -766,17 +905,17 @@ babel-core@^6.26.0, babel-core@^6.26.3: slash "^1.0.0" source-map "^0.5.7" -babel-eslint@^8.2.6: - version "8.2.6" - resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-8.2.6.tgz#6270d0c73205628067c0f7ae1693a9e797acefd9" - integrity sha512-aCdHjhzcILdP8c9lej7hvXKvQieyRt20SF102SIGyY4cUIiw6UaAtK4j2o3dXX74jEmy0TJ0CEhv4fTIM3SzcA== +babel-eslint@^10.1.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-10.1.0.tgz#6968e568a910b78fb3779cdd8b6ac2f479943232" + integrity sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg== dependencies: - "@babel/code-frame" "7.0.0-beta.44" - "@babel/traverse" "7.0.0-beta.44" - "@babel/types" "7.0.0-beta.44" - babylon "7.0.0-beta.44" - eslint-scope "3.7.1" + "@babel/code-frame" "^7.0.0" + "@babel/parser" "^7.7.0" + "@babel/traverse" "^7.7.0" + "@babel/types" "^7.7.0" eslint-visitor-keys "^1.0.0" + resolve "^1.12.0" babel-generator@^6.26.0: version "6.26.1" @@ -800,14 +939,16 @@ babel-helpers@^6.24.1: babel-runtime "^6.22.0" babel-template "^6.24.1" -babel-loader@^7.1.4: - version "7.1.5" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-7.1.5.tgz#e3ee0cd7394aa557e013b02d3e492bfd07aa6d68" - integrity sha512-iCHfbieL5d1LfOQeeVJEUyD9rTwBcP/fcEbRCfempxTDuqrKpu0AZjLAQHEQa3Yqyj9ORKe2iHfoj4rHLf7xpw== +babel-loader@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.1.0.tgz#c611d5112bd5209abe8b9fa84c3e4da25275f1c3" + 
integrity sha512-7q7nC1tYOrqvUrN3LQK4GwSk/TQorZSOlO9C+RZDZpODgyN4ZlCqE5q9cDsyWOliN+aU9B4JX01xK9eJXowJLw== dependencies: - find-cache-dir "^1.0.0" - loader-utils "^1.0.2" - mkdirp "^0.5.1" + find-cache-dir "^2.1.0" + loader-utils "^1.4.0" + mkdirp "^0.5.3" + pify "^4.0.1" + schema-utils "^2.6.5" babel-messages@^6.23.0: version "6.23.0" @@ -824,6 +965,38 @@ babel-plugin-css-modules-transform@^1.6.1: css-modules-require-hook "^4.0.6" mkdirp "^0.5.1" +babel-plugin-emotion@^9.2.11: + version "9.2.11" + resolved "https://registry.yarnpkg.com/babel-plugin-emotion/-/babel-plugin-emotion-9.2.11.tgz#319c005a9ee1d15bb447f59fe504c35fd5807728" + integrity sha512-dgCImifnOPPSeXod2znAmgc64NhaaOjGEHROR/M+lmStb3841yK1sgaDYAYMnlvWNz8GnpwIPN0VmNpbWYZ+VQ== + dependencies: + "@babel/helper-module-imports" "^7.0.0" + "@emotion/babel-utils" "^0.6.4" + "@emotion/hash" "^0.6.2" + "@emotion/memoize" "^0.6.1" + "@emotion/stylis" "^0.7.0" + babel-plugin-macros "^2.0.0" + babel-plugin-syntax-jsx "^6.18.0" + convert-source-map "^1.5.0" + find-root "^1.1.0" + mkdirp "^0.5.1" + source-map "^0.5.7" + touch "^2.0.1" + +babel-plugin-macros@^2.0.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz#0f958a7cc6556b1e65344465d99111a1e5e10138" + integrity sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg== + dependencies: + "@babel/runtime" "^7.7.2" + cosmiconfig "^6.0.0" + resolve "^1.12.0" + +babel-plugin-syntax-jsx@^6.18.0: + version "6.18.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946" + integrity sha1-CvMqmm4Tyno/1QaeYtew9Y0NiUY= + babel-polyfill@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.26.0.tgz#379937abc67d7895970adc621f284cd966cf2153" @@ -895,11 +1068,6 @@ babel@^6.23.0: resolved 
"https://registry.yarnpkg.com/babel/-/babel-6.23.0.tgz#d0d1e7d803e974765beea3232d4e153c0efb90f4" integrity sha1-0NHn2APpdHZb7qMjLU4VPA77kPQ= -babylon@7.0.0-beta.44: - version "7.0.0-beta.44" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.44.tgz#89159e15e6e30c5096e22d738d8c0af8a0e8ca1d" - integrity sha512-5Hlm13BJVAioCHpImtFqNOF2H3ieTOHd0fmFGMxOJ9jgeFqeAwsv3u5P5cR7CSeFrkgHsT19DgFJkHV0/Mcd8g== - babylon@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" @@ -961,6 +1129,11 @@ binary-extensions@^1.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== +binary-extensions@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.1.0.tgz#30fa40c9e7fe07dbc895678cd287024dea241dd9" + integrity sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ== + bindings@^1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" @@ -973,10 +1146,15 @@ bluebird@^3.5.5: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== -bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0: - version "4.11.8" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" - integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== +bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.4.0: + version "4.11.9" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.9.tgz#26d556829458f9d1e81fc48952493d0ba3507828" + 
integrity sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw== + +bn.js@^5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.2.tgz#c9686902d3c9a27729f43ab10f9d79c2004da7b0" + integrity sha512-40rZaf3bUNKTVYu9sIeeEGOg7g14Yvnj9kH7b50EiwX0Q7A6umbvfI5tvHaOERH0XigqKkfLkFQxzb4e6CIXnA== bootstrap-3-typeahead@^4.0.2: version "4.0.2" @@ -1017,7 +1195,7 @@ braces@^2.3.1, braces@^2.3.2: split-string "^3.0.2" to-regex "^3.0.1" -braces@^3.0.1: +braces@^3.0.1, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== @@ -1060,7 +1238,7 @@ browserify-des@^1.0.0: inherits "^2.0.1" safe-buffer "^5.1.2" -browserify-rsa@^4.0.0: +browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= @@ -1069,17 +1247,19 @@ browserify-rsa@^4.0.0: randombytes "^2.0.1" browserify-sign@^4.0.0: - version "4.0.4" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" - integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= - dependencies: - bn.js "^4.1.1" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.2" - elliptic "^6.0.0" - inherits "^2.0.1" - parse-asn1 "^5.0.0" + version "4.2.0" + resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.0.tgz#545d0b1b07e6b2c99211082bf1b12cce7a0b0e11" + integrity sha512-hEZC1KEeYuoHRqhGhTy6gWrpJA3ZDjFWv0DE61643ZnOXAKJb3u7yWcrU0mMc9SwAqK1n7myPGndkp0dFG7NFA== + dependencies: + bn.js "^5.1.1" + browserify-rsa "^4.0.1" + create-hash "^1.2.0" + create-hmac "^1.1.7" + elliptic "^6.5.2" + inherits "^2.0.4" + parse-asn1 "^5.1.5" + readable-stream 
"^3.6.0" + safe-buffer "^5.2.0" browserify-zlib@^0.2.0: version "0.2.0" @@ -1088,15 +1268,15 @@ browserify-zlib@^0.2.0: dependencies: pako "~1.0.5" -browserslist@^4.11.1: - version "4.11.1" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.11.1.tgz#92f855ee88d6e050e7e7311d987992014f1a1f1b" - integrity sha512-DCTr3kDrKEYNw6Jb9HFxVLQNaue8z+0ZfRBRjmCunKDEXEBajKDj2Y+Uelg+Pi29OnvaSGwjOsnRyNEkXzHg5g== +browserslist@^4.12.0: + version "4.13.0" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.13.0.tgz#42556cba011e1b0a2775b611cba6a8eca18e940d" + integrity sha512-MINatJ5ZNrLnQ6blGvePd/QOz9Xtu+Ne+x29iQSCHfkU5BugKVJwZKn/iiL8UbpIpa3JhviKjz+XxMo0m2caFQ== dependencies: - caniuse-lite "^1.0.30001038" - electron-to-chromium "^1.3.390" - node-releases "^1.1.53" - pkg-up "^2.0.0" + caniuse-lite "^1.0.30001093" + electron-to-chromium "^1.3.488" + escalade "^3.0.1" + node-releases "^1.1.58" buffer-from@^1.0.0: version "1.1.1" @@ -1143,26 +1323,28 @@ cacache@^12.0.2: unique-filename "^1.1.1" y18n "^4.0.0" -cacache@^12.0.3: - version "12.0.3" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" - integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== +cacache@^15.0.4: + version "15.0.5" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-15.0.5.tgz#69162833da29170d6732334643c60e005f5f17d0" + integrity sha512-lloiL22n7sOjEEXdL8NAjTgv9a1u43xICE9/203qonkZUCj5X1UEWIdf2/Y0d6QcCtMzbKQyhrcDbdvlZTs/+A== dependencies: - bluebird "^3.5.5" - chownr "^1.1.1" - figgy-pudding "^3.5.1" + "@npmcli/move-file" "^1.0.1" + chownr "^2.0.0" + fs-minipass "^2.0.0" glob "^7.1.4" - graceful-fs "^4.1.15" - infer-owner "^1.0.3" - lru-cache "^5.1.1" - mississippi "^3.0.0" - mkdirp "^0.5.1" - move-concurrently "^1.0.1" + infer-owner "^1.0.4" + lru-cache "^6.0.0" + minipass "^3.1.1" + minipass-collect "^1.0.2" + minipass-flush "^1.0.5" + minipass-pipeline 
"^1.2.2" + mkdirp "^1.0.3" + p-map "^4.0.0" promise-inflight "^1.0.1" - rimraf "^2.6.3" - ssri "^6.0.1" + rimraf "^3.0.2" + ssri "^8.0.0" + tar "^6.0.2" unique-filename "^1.1.1" - y18n "^4.0.0" cache-base@^1.0.1: version "1.0.1" @@ -1184,24 +1366,12 @@ call-me-maybe@^1.0.1: resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" integrity sha1-JtII6onje1y95gJQoV8DHBak1ms= -caller-path@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f" - integrity sha1-lAhe9jWB7NPaqSREqP6U6CV3dR8= - dependencies: - callsites "^0.2.0" - -callsites@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca" - integrity sha1-r6uWJikQp/M8GaV3WCXGnzTjUMo= - callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== -camelcase-keys@^6.1.1: +camelcase-keys@^6.2.2: version "6.2.2" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-6.2.2.tgz#5e755d6ba51aa223ec7d3d52f25778210f9dc3c0" integrity sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg== @@ -1215,25 +1385,21 @@ camelcase@^5.0.0, camelcase@^5.3.1: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -caniuse-lite@^1.0.30001038, caniuse-lite@^1.0.30001039: - version "1.0.30001039" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001039.tgz#b3814a1c38ffeb23567f8323500c09526a577bbe" - integrity 
sha512-SezbWCTT34eyFoWHgx8UWso7YtvtM7oosmFoXbCkdC6qJzRfBTeTgE9REtKtiuKXuMwWTZEvdnFNGAyVMorv8Q== +camelcase@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.0.0.tgz#5259f7c30e35e278f1bdc2a4d91230b37cad981e" + integrity sha512-8KMDF1Vz2gzOq54ONPJS65IvTUaB1cHJ2DMM7MbPmLZljDH1qpzzLsWdiN9pHh6qvkRVDTi/07+eNGch/oLU4w== + +caniuse-lite@^1.0.30001093, caniuse-lite@^1.0.30001097: + version "1.0.30001105" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001105.tgz#d2cb0b31e5cf2f3ce845033b61c5c01566549abf" + integrity sha512-JupOe6+dGMr7E20siZHIZQwYqrllxotAhiaej96y6x00b/48rPt42o+SzOSCPbrpsDWvRja40Hwrj0g0q6LZJg== ccount@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.0.5.tgz#ac82a944905a65ce204eb03023157edf29425c17" integrity sha512-MOli1W+nfbPLlKEhInaxhRdp7KVLFxLN5ykwzHgLsLI3H3gs5jjFAK4Eoj3OzzcxCtumDaI8onoVDeQyWaNTkw== -chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.1, chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" @@ -1245,6 +1411,15 @@ chalk@^1.1.3: strip-ansi "^3.0.0" supports-color "^2.0.0" +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + chalk@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-4.0.0.tgz#6e98081ed2d17faab615eb52ac66ec1fe6209e72" @@ -1253,6 +1428,14 @@ chalk@^4.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" + integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + character-entities-html4@^1.0.0: version "1.1.4" resolved "https://registry.yarnpkg.com/character-entities-html4/-/character-entities-html4-1.1.4.tgz#0e64b0a3753ddbf1fdc044c5fd01d0199a02e125" @@ -1273,11 +1456,6 @@ character-reference-invalid@^1.0.0: resolved "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz#083329cda0eae272ab3dbbf37e9a382c13af1560" integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg== -chardet@^0.4.0: - version "0.4.2" - resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2" - integrity sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I= - chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" @@ -1297,10 +1475,30 @@ chokidar@^2.1.8: optionalDependencies: fsevents "^1.2.7" +chokidar@^3.4.0: + version "3.4.1" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.1.tgz#e905bdecf10eaa0a0b1db0c664481cc4cbc22ba1" + integrity sha512-TQTJyr2stihpC4Sya9hs2Xh+O2wf+igjL36Y75xx2WdHuiICcn/XJza46Jwt0eT5hVpQOzo3FpY3cj3RVYLX0g== + dependencies: + anymatch "~3.1.1" + braces "~3.0.2" + glob-parent "~5.1.0" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.4.0" + optionalDependencies: + fsevents "~2.1.2" + chownr@^1.1.1: - version "1.1.3" - resolved 
"https://registry.yarnpkg.com/chownr/-/chownr-1.1.3.tgz#42d837d5239688d55f303003a508230fa6727142" - integrity sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw== + version "1.1.4" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== + +chownr@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" + integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== chrome-trace-event@^1.0.2: version "1.0.2" @@ -1317,11 +1515,6 @@ cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: inherits "^2.0.1" safe-buffer "^5.0.1" -circular-json@^0.3.1: - version "0.3.3" - resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.3.tgz#815c99ea84f6809529d2f45791bdf82711352d66" - integrity sha512-UZK3NBx2Mca+b5LsG7bY183pHWt5Y1xts4P3Pz7ENTwGVnJOUWbRb3ocjvX7hx9tq/yTAdclXm9sZ38gNuem4A== - class-utils@^0.3.5: version "0.3.6" resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" @@ -1332,29 +1525,23 @@ class-utils@^0.3.5: isobject "^3.0.0" static-extend "^0.1.1" -classnames@^2.2.3, classnames@^2.2.6: +classnames@^2.2.6: version "2.2.6" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.2.6.tgz#43935bffdd291f326dad0a205309b38d00f650ce" integrity sha512-JR/iSQOSt+LQIWwrwEzJ9uk0xfN3mTVYMwt1Ir5mUcSN6pU+V4zQFFaJsclJbPuAUQH+yfWef6tm7l1quW3C8Q== -clean-webpack-plugin@^0.1.19: - version "0.1.19" - resolved "https://registry.yarnpkg.com/clean-webpack-plugin/-/clean-webpack-plugin-0.1.19.tgz#ceda8bb96b00fe168e9b080272960d20fdcadd6d" - integrity sha512-M1Li5yLHECcN2MahoreuODul5LkjohJGFxLPTjl3j1ttKrF5rgjZET1SJduuqxLAuT1gAPOdkhg03qcaaU1KeA== - dependencies: - rimraf "^2.6.1" 
+clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== -cli-cursor@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" - integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= +clean-webpack-plugin@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/clean-webpack-plugin/-/clean-webpack-plugin-3.0.0.tgz#a99d8ec34c1c628a4541567aa7b457446460c62b" + integrity sha512-MciirUH5r+cYLGCOL5JX/ZLzOZbVr1ot3Fw+KcvbhUb6PM+yycqd9ZhIlcigQ5gl+XhppNmw3bEFuaaMNyLj3A== dependencies: - restore-cursor "^2.0.0" - -cli-width@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639" - integrity sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk= + "@types/webpack" "^4.4.31" + del "^4.1.1" clipboard@^2.0.0: version "2.0.6" @@ -1456,6 +1643,11 @@ color-name@~1.1.4: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +colorette@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.1.tgz#4d0b921325c14faf92633086a536db6e89564b1b" + integrity sha512-puCDz0CzydiSYOrnXpz/PKd69zRrribezjtE9yd4zvytoRc8+RY/KJPvtPFKZS3E3wP6neGyMe0vOTlHO5L3Pw== + commander@2, commander@^2.20.0, commander@~2.20.3: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" @@ -1476,7 +1668,7 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= 
-concat-stream@^1.5.0, concat-stream@^1.6.0: +concat-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== @@ -1486,7 +1678,7 @@ concat-stream@^1.5.0, concat-stream@^1.6.0: readable-stream "^2.2.2" typedarray "^0.0.6" -confusing-browser-globals@^1.0.5: +confusing-browser-globals@^1.0.9: version "1.0.9" resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.9.tgz#72bc13b483c0276801681871d4898516f8f54fdd" integrity sha512-KbS1Y0jMtyPgIxjO7ZzMAuUpAKMt1SzCL9fsrKsX6b0zJPTaT0SiSPmewwVZg9UAO83HVIlEhZF84LIjZ0lmAw== @@ -1506,7 +1698,7 @@ contains-path@^0.1.0: resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a" integrity sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo= -convert-source-map@^1.5.1, convert-source-map@^1.7.0: +convert-source-map@^1.5.0, convert-source-map@^1.5.1, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== @@ -1530,23 +1722,22 @@ copy-descriptor@^0.1.0: resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= -copy-webpack-plugin@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-5.1.1.tgz#5481a03dea1123d88a988c6ff8b78247214f0b88" - integrity sha512-P15M5ZC8dyCjQHWwd4Ia/dm0SgVvZJMYeykVIVYXbGyqO4dWB5oyPHp9i7wjwo5LhtlhKbiBCdS2NvM07Wlybg== - dependencies: - cacache "^12.0.3" - find-cache-dir "^2.1.0" - glob-parent "^3.1.0" - globby "^7.1.1" - is-glob "^4.0.1" - 
loader-utils "^1.2.3" - minimatch "^3.0.4" +copy-webpack-plugin@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-6.0.3.tgz#2b3d2bfc6861b96432a65f0149720adbd902040b" + integrity sha512-q5m6Vz4elsuyVEIUXr7wJdIdePWTubsqVbEMvf1WQnHGv0Q+9yPRu7MtYFPt+GBOXRav9lvIINifTQ1vSCs+eA== + dependencies: + cacache "^15.0.4" + fast-glob "^3.2.4" + find-cache-dir "^3.3.1" + glob-parent "^5.1.1" + globby "^11.0.1" + loader-utils "^2.0.0" normalize-path "^3.0.0" - p-limit "^2.2.1" - schema-utils "^1.0.0" - serialize-javascript "^2.1.2" - webpack-log "^2.0.0" + p-limit "^3.0.1" + schema-utils "^2.7.0" + serialize-javascript "^4.0.0" + webpack-sources "^1.4.3" core-js@^2.4.0, core-js@^2.5.0: version "2.6.11" @@ -1582,7 +1773,20 @@ create-ecdh@^4.0.0: bn.js "^4.1.0" elliptic "^6.0.0" -create-hash@^1.1.0, create-hash@^1.1.2: +create-emotion@^9.2.12: + version "9.2.12" + resolved "https://registry.yarnpkg.com/create-emotion/-/create-emotion-9.2.12.tgz#0fc8e7f92c4f8bb924b0fef6781f66b1d07cb26f" + integrity sha512-P57uOF9NL2y98Xrbl2OuiDQUZ30GVmASsv5fbsjF4Hlraip2kyAvMm+2PoYUvFFw03Fhgtxk3RqZSm2/qHL9hA== + dependencies: + "@emotion/hash" "^0.6.2" + "@emotion/memoize" "^0.6.1" + "@emotion/stylis" "^0.7.0" + "@emotion/unitless" "^0.6.2" + csstype "^2.5.2" + stylis "^3.5.0" + stylis-rule-sheet "^0.0.10" + +create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== @@ -1593,7 +1797,7 @@ create-hash@^1.1.0, create-hash@^1.1.2: ripemd160 "^2.0.1" sha.js "^2.4.0" -create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: +create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: version "1.1.7" resolved 
"https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== @@ -1605,7 +1809,7 @@ create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: safe-buffer "^5.0.1" sha.js "^2.4.8" -cross-spawn@6.0.5, cross-spawn@^6.0.0: +cross-spawn@^6.0.0, cross-spawn@^6.0.5: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== @@ -1616,14 +1820,14 @@ cross-spawn@6.0.5, cross-spawn@^6.0.0: shebang-command "^1.2.0" which "^1.2.9" -cross-spawn@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" - integrity sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk= +cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== dependencies: - lru-cache "^4.0.1" - shebang-command "^1.2.0" - which "^1.2.9" + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" crypto-browserify@^3.11.0: version "3.12.0" @@ -1643,22 +1847,23 @@ crypto-browserify@^3.11.0: randomfill "^1.0.3" css-loader@^3.4.2: - version "3.4.2" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-3.4.2.tgz#d3fdb3358b43f233b78501c5ed7b1c6da6133202" - integrity sha512-jYq4zdZT0oS0Iykt+fqnzVLRIeiPWhka+7BqPn+oSIpWJAHak5tmB/WZrJ2a21JhCeFyNnnlroSl8c+MtVndzA== + version "3.6.0" + resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-3.6.0.tgz#2e4b2c7e6e2d27f8c8f28f61bffcd2e6c91ef645" + integrity sha512-M5lSukoWi1If8dhQAUCvj4H8vUt3vOnwbQBH9DdTm/s4Ym2B/3dPMtYZeJmq7Q3S3Pa+I94DcZ7pc9bP14cWIQ== 
dependencies: camelcase "^5.3.1" cssesc "^3.0.0" icss-utils "^4.1.1" loader-utils "^1.2.3" normalize-path "^3.0.0" - postcss "^7.0.23" + postcss "^7.0.32" postcss-modules-extract-imports "^2.0.0" postcss-modules-local-by-default "^3.0.2" - postcss-modules-scope "^2.1.1" + postcss-modules-scope "^2.2.0" postcss-modules-values "^3.0.0" - postcss-value-parser "^4.0.2" - schema-utils "^2.6.0" + postcss-value-parser "^4.1.0" + schema-utils "^2.7.0" + semver "^6.3.0" css-modules-require-hook@^4.0.6: version "4.2.3" @@ -1697,6 +1902,11 @@ cssesc@^3.0.0: resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== +csstype@^2.5.2: + version "2.6.11" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.11.tgz#452f4d024149ecf260a852b025e36562a253ffc5" + integrity sha512-l8YyEC9NBkSm783PFTvh0FmJy7s5pFKrDp49ZL7zBGX3fWkO+N4EEyan1qqp8cwPLDcD0OSdyY6hAMoxp34JFw== + cyclist@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" @@ -1981,18 +2191,18 @@ dagre@^0.8.5: graphlib "^2.1.8" lodash "^4.17.15" -datatables.net-bs@^1.10.19: - version "1.10.20" - resolved "https://registry.yarnpkg.com/datatables.net-bs/-/datatables.net-bs-1.10.20.tgz#4a54a65527013aa8bb98eb7fa27d6231f7dc1bee" - integrity sha512-NsMoOOYZ6NlteOpzhltw21lXsNdhjIMbIOxnqmcrb62ntl8eL9pYzk2AeiDXBlIKY4e550ZrExCq3CYKQ9myEg== +datatables.net-bs@^1.10.21: + version "1.10.21" + resolved "https://registry.yarnpkg.com/datatables.net-bs/-/datatables.net-bs-1.10.21.tgz#c80e655033787512423ad78a45e8164f25417dc5" + integrity sha512-4mpesFXNEkLlQET3IDLclLz95Xit4Kp/jHcOM2X0nc/ijDfO3qJk3ehZ+NSEAkXZDge6ZtY5Zxq2O90ISiIjwQ== dependencies: - datatables.net "1.10.20" + datatables.net "1.10.21" jquery ">=1.7" -datatables.net@1.10.20, datatables.net@^1.10.19: - version "1.10.20" - resolved 
"https://registry.yarnpkg.com/datatables.net/-/datatables.net-1.10.20.tgz#9d65ecc3c83cbe7baa4fa5a053405c8fe42c1350" - integrity sha512-4E4S7tTU607N3h0fZPkGmAtr9mwy462u+VJ6gxYZ8MxcRIjZqHy3Dv1GNry7i3zQCktTdWbULVKBbkAJkuHEnQ== +datatables.net@1.10.21, datatables.net@^1.10.21: + version "1.10.21" + resolved "https://registry.yarnpkg.com/datatables.net/-/datatables.net-1.10.21.tgz#f1d35c8e5c3eb7f5caef39e80cd5b836a8c77103" + integrity sha512-/bSZtxmf3GTpYcvEmwZ8q26I1yhSx8qklR2B+s1K8+/51UW/zc2zTYwJMqr/Z+iCYixAc00ildj4g2x0Qamolw== dependencies: jquery ">=1.7" @@ -2003,14 +2213,7 @@ debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: dependencies: ms "2.0.0" -debug@^3.1.0: - version "3.2.6" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" - integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== - dependencies: - ms "^2.1.1" - -debug@^4.1.0, debug@^4.1.1: +debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== @@ -2040,7 +2243,7 @@ decode-uri-component@^0.2.0: resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= -deep-is@~0.1.3: +deep-is@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= @@ -2074,6 +2277,19 @@ define-property@^2.0.2: is-descriptor "^1.0.2" isobject "^3.0.1" +del@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" + integrity 
sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== + dependencies: + "@types/glob" "^7.1.1" + globby "^6.1.0" + is-path-cwd "^2.0.0" + is-path-in-cwd "^2.0.0" + p-map "^2.0.0" + pify "^4.0.1" + rimraf "^2.6.3" + delegate@^3.1.2: version "3.2.0" resolved "https://registry.yarnpkg.com/delegate/-/delegate-3.2.0.tgz#b66b71c3158522e8ab5744f720d8ca0c2af59166" @@ -2108,13 +2324,6 @@ diffie-hellman@^5.0.0: miller-rabin "^4.0.0" randombytes "^2.0.0" -dir-glob@^2.0.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" - integrity sha512-f9LBi5QWzIW3I6e//uxZoLBlUt9kcp66qo0sSCxL6YZKc75R1c4MFCoe/LaZiBGmgujvQdxc5Bn3QhfyvK5Hsw== - dependencies: - path-type "^3.0.0" - dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -2130,14 +2339,14 @@ doctrine@1.5.0: esutils "^2.0.2" isarray "^1.0.0" -doctrine@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== dependencies: esutils "^2.0.2" -dom-serializer@0: +dom-serializer@0, dom-serializer@^0.2.1: version "0.2.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== @@ -2167,10 +2376,17 @@ domhandler@^2.3.0: dependencies: domelementtype "1" +domhandler@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/domhandler/-/domhandler-3.0.0.tgz#51cd13efca31da95bbb0c5bee3a48300e333b3e9" + integrity sha512-eKLdI5v9m67kbXQbJSNn1zjh0SDzvzWVWtX+qEI3eMjZw8daH9k8rlj1FZY9memPwjiskQFbe7vHVVJIAqoEhw== + dependencies: + domelementtype "^2.0.1" + dompurify@^2.0.8: - version "2.0.11" - resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.0.11.tgz#cd47935774230c5e478b183a572e726300b3891d" - integrity sha512-qVoGPjIW9IqxRij7klDQQ2j6nSe4UNWANBhZNLnsS7ScTtLb+3YdxkRY8brNTpkUiTtcXsCJO+jS0UCDfenLuA== + version "2.0.12" + resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.0.12.tgz#284a2b041e1c60b8e72d7b4d2fadad36141254ae" + integrity sha512-Fl8KseK1imyhErHypFPA8qpq9gPzlsJ/EukA6yk9o0gX23p1TzC+rh9LqNg1qvErRTc0UNMYlKxEGSfSh43NDg== domutils@^1.5.1: version "1.7.0" @@ -2180,6 +2396,15 @@ domutils@^1.5.1: dom-serializer "0" domelementtype "1" +domutils@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.1.0.tgz#7ade3201af43703fde154952e3a868eb4b635f16" + integrity sha512-CD9M0Dm1iaHfQ1R/TI+z3/JWp/pgub0j4jIQKH89ARR4ATAV2nbaOQS5XxU9maJP5jHaPdDDQSEHuE2UmpUTKg== + dependencies: + dom-serializer "^0.2.1" + domelementtype "^2.0.1" + domhandler "^3.0.0" + duplexify@^3.4.2, duplexify@^3.6.0: version "3.7.1" resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" @@ -2190,15 +2415,15 @@ duplexify@^3.4.2, duplexify@^3.6.0: readable-stream "^2.0.0" stream-shift "^1.0.0" -electron-to-chromium@^1.3.390: - version "1.3.398" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.398.tgz#4c01e29091bf39e578ac3f66c1f157d92fa5725d" - integrity sha512-BJjxuWLKFbM5axH3vES7HKMQgAknq9PZHBkMK/rEXUQG9i1Iw5R+6hGkm6GtsQSANjSUrh/a6m32nzCNDNo/+w== +electron-to-chromium@^1.3.488: + version "1.3.504" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.504.tgz#54d6288202f8453053c006eb862e2e3b7bc867a5" + integrity 
sha512-yOXnuPaaLAIZUVuXHYDCo3EeaiEfbFgYWCPH1tBMp+jznCq/zQYKnf6HmkKBmLJ0VES81avl18JZO1lx/XAHOw== -elliptic@^6.0.0: - version "6.5.2" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.2.tgz#05c5678d7173c049d8ca433552224a495d0e3762" - integrity sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw== +elliptic@^6.0.0, elliptic@^6.5.2: + version "6.5.3" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.3.tgz#cb59eb2efdaf73a0bd78ccd7015a62ad6e0f93d6" + integrity sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw== dependencies: bn.js "^4.4.0" brorand "^1.0.1" @@ -2223,6 +2448,19 @@ emojis-list@^2.0.0: resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= +emojis-list@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +emotion@^9.2.6: + version "9.2.12" + resolved "https://registry.yarnpkg.com/emotion/-/emotion-9.2.12.tgz#53925aaa005614e65c6e43db8243c843574d1ea9" + integrity sha512-hcx7jppaI8VoXxIWEhxpDW7I+B4kq9RNzQLmsrF6LY8BGKqe2N+gFAQr0EfuFucFlPs2A9HM4+xNj4NeqEWIOQ== + dependencies: + babel-plugin-emotion "^9.2.11" + create-emotion "^9.2.12" + end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" @@ -2230,23 +2468,21 @@ end-of-stream@^1.0.0, end-of-stream@^1.1.0: dependencies: once "^1.4.0" -enhanced-resolve@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" - integrity 
sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== +enhanced-resolve@^4.1.0, enhanced-resolve@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.3.0.tgz#3b806f3bfafc1ec7de69551ef93cca46c1704126" + integrity sha512-3e87LvavsdxyoCfGusJnrZ5G8SLPOFeHSNpZI/ATL9a5leXo2k0w6MKnbqhdBad9qTobSfB20Ld7UmgoNbAZkQ== dependencies: graceful-fs "^4.1.2" - memory-fs "^0.4.0" + memory-fs "^0.5.0" tapable "^1.0.0" -enhanced-resolve@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" - integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== +enquirer@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== dependencies: - graceful-fs "^4.1.2" - memory-fs "^0.5.0" - tapable "^1.0.0" + ansi-colors "^4.1.1" entities@^1.1.1: version "1.1.2" @@ -2282,7 +2518,24 @@ error-ex@^1.2.0, error-ex@^1.3.1: dependencies: is-arrayish "^0.2.1" -es-abstract@^1.17.0-next.0, es-abstract@^1.17.0-next.1: +es-abstract@^1.17.0, es-abstract@^1.17.5: + version "1.17.6" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.6.tgz#9142071707857b2cacc7b89ecb670316c3e2d52a" + integrity sha512-Fr89bON3WFyUi5EvAeI48QTWX0AyekGgLA8H+c+7fbfCkJwRWRMLd8CQedNEyJuoYYhmtEqY92pgte1FAhBlhw== + dependencies: + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + is-callable "^1.2.0" + is-regex "^1.1.0" + object-inspect "^1.7.0" + object-keys "^1.1.1" + object.assign "^4.1.0" + string.prototype.trimend "^1.0.1" + string.prototype.trimstart "^1.0.1" + +es-abstract@^1.17.0-next.1: version "1.17.0-next.1" resolved 
"https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.0-next.1.tgz#94acc93e20b05a6e96dacb5ab2f1cb3a81fc2172" integrity sha512-7MmGr03N7Rnuid6+wyhD9sHNE2n4tFSwExnU2lQl3lIo2ShXWGePY80zYaoMOmILWv57H0amMjZGHNzzGG70Rw== @@ -2313,106 +2566,96 @@ es6-promise@^3.2.1: resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-3.3.1.tgz#a08cdde84ccdbf34d027a1451bc91d4bcd28a613" integrity sha1-oIzd6EzNvzTQJ6FFG8kdS80ophM= +escalade@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.0.2.tgz#6a580d70edb87880f22b4c91d0d56078df6962c4" + integrity sha512-gPYAU37hYCUhW5euPeR+Y74F7BL+IBsV93j5cvGriSaD1aG6MGsqsV1yamRdrWrb2j3aiZvb0X+UBOWpx3JWtQ== + escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= -eslint-config-airbnb-base@^13.0.0: - version "13.2.0" - resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-13.2.0.tgz#f6ea81459ff4dec2dda200c35f1d8f7419d57943" - integrity sha512-1mg/7eoB4AUeB0X1c/ho4vb2gYkNH8Trr/EgCT/aGmKhhG+F6vF5s8+iRBlWAzFIAphxIdp3YfEKgEl0f9Xg+w== +eslint-config-airbnb-base@^14.2.0: + version "14.2.0" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-14.2.0.tgz#fe89c24b3f9dc8008c9c0d0d88c28f95ed65e9c4" + integrity sha512-Snswd5oC6nJaevs3nZoLSTvGJBvzTfnBqOIArkf3cbyTyq9UD79wOk8s+RiL6bhca0p/eRO6veczhf6A/7Jy8Q== dependencies: - confusing-browser-globals "^1.0.5" + confusing-browser-globals "^1.0.9" object.assign "^4.1.0" - object.entries "^1.1.0" + object.entries "^1.1.2" -eslint-import-resolver-node@^0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz#58f15fb839b8d0576ca980413476aab2472db66a" - integrity 
sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q== +eslint-import-resolver-node@^0.3.3: + version "0.3.4" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz#85ffa81942c25012d8231096ddf679c03042c717" + integrity sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA== dependencies: debug "^2.6.9" - resolve "^1.5.0" + resolve "^1.13.1" -eslint-module-utils@^2.4.1: - version "2.5.0" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.5.0.tgz#cdf0b40d623032274ccd2abd7e64c4e524d6e19c" - integrity sha512-kCo8pZaNz2dsAW7nCUjuVoI11EBXXpIzfNxmaoLhXoRDOnqXLC4iSGVRdZPhOitfbdEfMEfKOiENaK6wDPZEGw== +eslint-module-utils@^2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz#579ebd094f56af7797d19c9866c9c9486629bfa6" + integrity sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA== dependencies: debug "^2.6.9" pkg-dir "^2.0.0" -eslint-plugin-es@^1.3.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-1.4.1.tgz#12acae0f4953e76ba444bfd1b2271081ac620998" - integrity sha512-5fa/gR2yR3NxQf+UXkeLeP8FBBl6tSgdrAz1+cF84v1FMM4twGwQoqTnn+QxFLcPOrF4pdKEJKDB/q9GoyJrCA== +eslint-plugin-es@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz#75a7cdfdccddc0589934aeeb384175f221c57893" + integrity sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ== dependencies: - eslint-utils "^1.4.2" - regexpp "^2.0.1" + eslint-utils "^2.0.0" + regexpp "^3.0.0" -eslint-plugin-html@^4.0.5: - version "4.0.6" - resolved "https://registry.yarnpkg.com/eslint-plugin-html/-/eslint-plugin-html-4.0.6.tgz#724bb9272efb4df007dfee8dfb269ed83577e5b4" - integrity 
sha512-nj6A9oK+7BKnMm0E7dMRH3r75BfpkXtcVIb3pFC4AcDdBTNyg2NGxHXyFNT1emW4VsR7P2SZvRXXQtUR+kY08w== +eslint-plugin-html@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-html/-/eslint-plugin-html-6.0.2.tgz#fcbd293e218d03dd72c147fc999d185c6f5989fe" + integrity sha512-Ik/z32UteKLo8GEfwNqVKcJ/WOz/be4h8N5mbMmxxnZ+9aL9XczOXQFz/bGu+nAGVoRg8CflldxJhONFpqlrxw== dependencies: - htmlparser2 "^3.8.2" + htmlparser2 "^4.1.0" -eslint-plugin-import@^2.13.0: - version "2.19.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.19.1.tgz#5654e10b7839d064dd0d46cd1b88ec2133a11448" - integrity sha512-x68131aKoCZlCae7rDXKSAQmbT5DQuManyXo2sK6fJJ0aK5CWAkv6A6HJZGgqC8IhjQxYPgo6/IY4Oz8AFsbBw== +eslint-plugin-import@^2.22.0: + version "2.22.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.0.tgz#92f7736fe1fde3e2de77623c838dd992ff5ffb7e" + integrity sha512-66Fpf1Ln6aIS5Gr/55ts19eUuoDhAbZgnr6UxK5hbDx6l/QgQgx61AePq+BV4PP2uXQFClgMVzep5zZ94qqsxg== dependencies: - array-includes "^3.0.3" - array.prototype.flat "^1.2.1" + array-includes "^3.1.1" + array.prototype.flat "^1.2.3" contains-path "^0.1.0" debug "^2.6.9" doctrine "1.5.0" - eslint-import-resolver-node "^0.3.2" - eslint-module-utils "^2.4.1" + eslint-import-resolver-node "^0.3.3" + eslint-module-utils "^2.6.0" has "^1.0.3" minimatch "^3.0.4" - object.values "^1.1.0" + object.values "^1.1.1" read-pkg-up "^2.0.0" - resolve "^1.12.0" + resolve "^1.17.0" + tsconfig-paths "^3.9.0" -eslint-plugin-node@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-7.0.1.tgz#a6e054e50199b2edd85518b89b4e7b323c9f36db" - integrity sha512-lfVw3TEqThwq0j2Ba/Ckn2ABdwmL5dkOgAux1rvOk6CO7A6yGyPI2+zIxN6FyNkp1X1X/BSvKOceD6mBWSj4Yw== +eslint-plugin-node@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz#c95544416ee4ada26740a30474eefc5402dc671d" + 
integrity sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g== dependencies: - eslint-plugin-es "^1.3.1" - eslint-utils "^1.3.1" - ignore "^4.0.2" + eslint-plugin-es "^3.0.0" + eslint-utils "^2.0.0" + ignore "^5.1.1" minimatch "^3.0.4" - resolve "^1.8.1" - semver "^5.5.0" - -eslint-plugin-promise@^3.8.0: - version "3.8.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-3.8.0.tgz#65ebf27a845e3c1e9d6f6a5622ddd3801694b621" - integrity sha512-JiFL9UFR15NKpHyGii1ZcvmtIqa3UTwiDAGb8atSffe43qJ3+1czVGN6UtkklpcJ2DVnqvTMzEKRaJdBkAL2aQ== - -eslint-plugin-standard@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-standard/-/eslint-plugin-standard-3.1.0.tgz#2a9e21259ba4c47c02d53b2d0c9135d4b1022d47" - integrity sha512-fVcdyuKRr0EZ4fjWl3c+gp1BANFJD1+RaWa2UPYfMZ6jCtp5RG00kSaXnK/dE5sYzt4kaWJ9qdxqUfc0d9kX0w== + resolve "^1.10.1" + semver "^6.1.0" -eslint-scope@3.7.1: - version "3.7.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-3.7.1.tgz#3d63c3edfda02e06e01a452ad88caacc7cdcb6e8" - integrity sha1-PWPD7f2gLgbgGkUq2IyqzHzctug= - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" +eslint-plugin-promise@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-4.2.1.tgz#845fd8b2260ad8f82564c1222fce44ad71d9418a" + integrity sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw== -eslint-scope@^3.7.1: - version "3.7.3" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-3.7.3.tgz#bb507200d3d17f60247636160b4826284b108535" - integrity sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA== - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" +eslint-plugin-standard@^4.0.1: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/eslint-plugin-standard/-/eslint-plugin-standard-4.0.1.tgz#ff0519f7ffaff114f76d1bd7c3996eef0f6e20b4" + integrity sha512-v/KBnfyaOMPmZc/dmc6ozOdWqekGp7bBGq4jLAecEfPGmfKiWS4sA8sC0LqiV9w5qmXAtXVn4M3p1jSyhY85SQ== eslint-scope@^4.0.3: version "4.0.3" @@ -2422,10 +2665,18 @@ eslint-scope@^4.0.3: esrecurse "^4.1.0" estraverse "^4.1.1" -eslint-utils@^1.3.1, eslint-utils@^1.4.2: - version "1.4.3" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.4.3.tgz#74fec7c54d0776b6f67e0251040b5806564e981f" - integrity sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q== +eslint-scope@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.0.tgz#d0f971dfe59c69e0cada684b23d49dbf82600ce5" + integrity sha512-iiGRvtxWqgtx5m8EyQUJihBloE4EnYeGE/bz1wSPwJE6tZuJUtHlhqDM4Xj2ukE8Dyy1+HCZ4hE0fzIVMzb58w== + dependencies: + esrecurse "^4.1.0" + estraverse "^4.1.1" + +eslint-utils@^2.0.0, eslint-utils@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== dependencies: eslint-visitor-keys "^1.1.0" @@ -2434,69 +2685,73 @@ eslint-visitor-keys@^1.0.0, eslint-visitor-keys@^1.1.0: resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz#e2a82cea84ff246ad6fb57f9bde5b46621459ec2" integrity sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A== -eslint@^4.19.1: - version "4.19.1" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-4.19.1.tgz#32d1d653e1d90408854bfb296f076ec7e186a300" - integrity sha512-bT3/1x1EbZB7phzYu7vCr1v3ONuzDtX8WjuM9c0iYxe+cq+pwcKEoQjl7zd3RpC6YOLgnSy3cTN58M2jcoPDIQ== - dependencies: - ajv "^5.3.0" - babel-code-frame "^6.22.0" - chalk "^2.1.0" - concat-stream "^1.6.0" - 
cross-spawn "^5.1.0" - debug "^3.1.0" - doctrine "^2.1.0" - eslint-scope "^3.7.1" - eslint-visitor-keys "^1.0.0" - espree "^3.5.4" - esquery "^1.0.0" +eslint-visitor-keys@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + +eslint@^7.5.0: + version "7.5.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.5.0.tgz#9ecbfad62216d223b82ac9ffea7ef3444671d135" + integrity sha512-vlUP10xse9sWt9SGRtcr1LAC67BENcQMFeV+w5EvLEoFe3xJ8cF1Skd0msziRx/VMC+72B4DxreCE+OR12OA6Q== + dependencies: + "@babel/code-frame" "^7.0.0" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + eslint-scope "^5.1.0" + eslint-utils "^2.1.0" + eslint-visitor-keys "^1.3.0" + espree "^7.2.0" + esquery "^1.2.0" esutils "^2.0.2" - file-entry-cache "^2.0.0" + file-entry-cache "^5.0.1" functional-red-black-tree "^1.0.1" - glob "^7.1.2" - globals "^11.0.1" - ignore "^3.3.3" + glob-parent "^5.0.0" + globals "^12.1.0" + ignore "^4.0.6" + import-fresh "^3.0.0" imurmurhash "^0.1.4" - inquirer "^3.0.6" - is-resolvable "^1.0.0" - js-yaml "^3.9.1" + is-glob "^4.0.0" + js-yaml "^3.13.1" json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.3.0" - lodash "^4.17.4" - minimatch "^3.0.2" - mkdirp "^0.5.1" + levn "^0.4.1" + lodash "^4.17.19" + minimatch "^3.0.4" natural-compare "^1.4.0" - optionator "^0.8.2" - path-is-inside "^1.0.2" - pluralize "^7.0.0" + optionator "^0.9.1" progress "^2.0.0" - regexpp "^1.0.1" - require-uncached "^1.0.3" - semver "^5.3.0" - strip-ansi "^4.0.0" - strip-json-comments "~2.0.1" - table "4.0.2" - text-table "~0.2.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi "^6.0.0" + strip-json-comments "^3.1.0" + table "^5.2.3" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" -espree@^3.5.4: - version "3.5.4" 
- resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.4.tgz#b0f447187c8a8bed944b815a660bddf5deb5d1a7" - integrity sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A== +espree@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-7.2.0.tgz#1c263d5b513dbad0ac30c4991b93ac354e948d69" + integrity sha512-H+cQ3+3JYRMEIOl87e7QdHX70ocly5iW4+dttuR8iYSPr/hXKFb+7dBsZ7+u1adC4VrnPlTkv0+OwuPnDop19g== dependencies: - acorn "^5.5.0" - acorn-jsx "^3.0.0" + acorn "^7.3.1" + acorn-jsx "^5.2.0" + eslint-visitor-keys "^1.3.0" esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== -esquery@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.1.tgz#406c51658b1f5991a5f9b62b1dc25b00e3e5c708" - integrity sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA== +esquery@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.3.1.tgz#b78b5828aa8e214e29fb74c4d5b752e1c033da57" + integrity sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ== dependencies: - estraverse "^4.0.0" + estraverse "^5.1.0" esrecurse@^4.1.0: version "4.2.1" @@ -2505,11 +2760,16 @@ esrecurse@^4.1.0: dependencies: estraverse "^4.1.0" -estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1: +estraverse@^4.1.0, estraverse@^4.1.1: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== +estraverse@^5.1.0: + version "5.1.0" + resolved 
"https://registry.yarnpkg.com/estraverse/-/estraverse-5.1.0.tgz#374309d39fd935ae500e7b92e8a6b4c720e59642" + integrity sha512-FyohXK+R0vE+y1nHLoBM7ZTyqRpqAlhdZHCWIWEviFLiGB8b04H6bQs8G+XTthacvT8VuwvteiP7RJSxMs8UEw== + esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" @@ -2521,9 +2781,9 @@ eventemitter3@^4.0.0: integrity sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ== events@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" - integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== + version "3.2.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.2.0.tgz#93b87c18f8efcd4202a461aec4dfc0556b639379" + integrity sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg== evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" @@ -2593,15 +2853,6 @@ extend@^3.0.0: resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== -external-editor@^2.0.4: - version "2.2.0" - resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.2.0.tgz#045511cfd8d133f3846673d1047c154e214ad3d5" - integrity sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A== - dependencies: - chardet "^0.4.0" - iconv-lite "^0.4.17" - tmp "^0.0.33" - extglob@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" @@ -2621,10 +2872,10 @@ fast-deep-equal@^1.0.0: resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" integrity 
sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= -fast-deep-equal@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" - integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== fast-glob@^3.1.1: version "3.2.2" @@ -2638,12 +2889,24 @@ fast-glob@^3.1.1: micromatch "^4.0.2" picomatch "^2.2.1" +fast-glob@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.4.tgz#d20aefbf99579383e7f3cc66529158c9b98554d3" + integrity sha512-kr/Oo6PX51265qeuCYsyGypiO5uJFgBS0jksyG7FUeCyQzNwYnzrNIMR1NXfkZXsMYXYLRAHgISHBz8gQcxKHQ== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.0" + merge2 "^1.3.0" + micromatch "^4.0.2" + picomatch "^2.2.1" + fast-json-stable-stringify@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== -fast-levenshtein@~2.0.6: +fast-levenshtein@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= @@ -2666,24 +2929,9 @@ fastq@^1.6.0: reusify "^1.0.4" figgy-pudding@^3.5.1: - version "3.5.1" - resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.1.tgz#862470112901c727a0e495a80744bd5baa1d6790" - integrity sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w== - -figures@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" - integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI= - dependencies: - escape-string-regexp "^1.0.5" - -file-entry-cache@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-2.0.0.tgz#c392990c3e684783d838b8c84a45d8a048458361" - integrity sha1-w5KZDD5oR4PYOLjISkXYoEhFg2E= - dependencies: - flat-cache "^1.2.1" - object-assign "^4.0.1" + version "3.5.2" + resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" + integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== file-entry-cache@^5.0.1: version "5.0.1" @@ -2692,13 +2940,13 @@ file-entry-cache@^5.0.1: dependencies: flat-cache "^2.0.1" -file-loader@^1.1.11: - version "1.1.11" - resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-1.1.11.tgz#6fe886449b0f2a936e43cabaac0cdbfb369506f8" - integrity sha512-TGR4HU7HUsGg6GCOPJnFk06RhWgEWFLAGWiT6rcD+GRC2keU3s9RGJ+b3Z6/U73jwwNb2gKLJ7YCrp+jvU4ALg== +file-loader@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.0.0.tgz#97bbfaab7a2460c07bcbd72d3a6922407f67649f" + integrity sha512-/aMOAYEFXDdjG0wytpTL5YQLfZnnTmLNjn+AIrJ/6HVnTfDqLsVKUUwkDf4I4kgex36BvjuXEn/TX9B/1ESyqQ== dependencies: - loader-utils "^1.0.2" - schema-utils "^0.4.5" + loader-utils "^2.0.0" + schema-utils "^2.6.5" file-uri-to-path@1.0.0: version "1.0.0" @@ -2722,15 +2970,6 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" -find-cache-dir@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-1.0.0.tgz#9288e3e9e3cc3748717d39eade17cf71fc30ee6f" - integrity sha1-kojj6ePMN0hxfTnq3hfPcfww7m8= - dependencies: - commondir "^1.0.1" - make-dir "^1.0.0" - pkg-dir "^2.0.0" - find-cache-dir@^2.1.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" @@ -2740,6 +2979,20 @@ find-cache-dir@^2.1.0: make-dir "^2.0.0" pkg-dir "^3.0.0" +find-cache-dir@^3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.1.tgz#89b33fad4a4670daa94f855f7fbe31d6d84fe880" + integrity sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-root@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" + integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== + find-up@^2.0.0, find-up@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" @@ -2754,7 +3007,7 @@ find-up@^3.0.0: dependencies: locate-path "^3.0.0" -find-up@^4.1.0: +find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== @@ -2762,7 +3015,7 @@ find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" -findup-sync@3.0.0: +findup-sync@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== @@ -2772,16 +3025,6 @@ findup-sync@3.0.0: micromatch "^3.0.4" resolve-dir "^1.0.1" -flat-cache@^1.2.1: - version "1.3.4" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.4.tgz#2c2ef77525cc2929007dfffa1dd314aa9c9dee6f" - integrity 
sha512-VwyB3Lkgacfik2vhqR4uv2rvebqmDvFu4jlN/C1RzWoJEo8I7z4Q404oiqYCkq41mni8EzQnm95emU9seckwtg== - dependencies: - circular-json "^0.3.1" - graceful-fs "^4.1.2" - rimraf "~2.6.2" - write "^0.2.1" - flat-cache@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0" @@ -2843,6 +3086,13 @@ fs-extra@^7.0.0: jsonfile "^4.0.0" universalify "^0.1.0" +fs-minipass@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" + integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== + dependencies: + minipass "^3.0.0" + fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" @@ -2859,13 +3109,18 @@ fs.realpath@^1.0.0: integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.2.7: - version "1.2.12" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.12.tgz#db7e0d8ec3b0b45724fd4d83d43554a8f1f0de5c" - integrity sha512-Ggd/Ktt7E7I8pxZRbGIs7vwqAPscSESMrCSkx2FtWeqmheJgCo2R74fTsZFCifr0VTPwqRpPv17+6b8Zp7th0Q== + version "1.2.13" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38" + integrity sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw== dependencies: bindings "^1.5.0" nan "^2.12.1" +fsevents@~2.1.2: + version "2.1.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e" + integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ== + function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" @@ -2898,10 +3153,10 @@ 
get-caller-file@^2.0.1: resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-stdin@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-7.0.0.tgz#8d5de98f15171a125c5e516643c7a6d0ea8a96f6" - integrity sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ== +get-stdin@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" + integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg== get-stream@^4.0.0: version "4.1.0" @@ -2923,7 +3178,7 @@ glob-parent@^3.1.0: is-glob "^3.1.0" path-dirname "^1.0.0" -glob-parent@^5.1.0: +glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@~5.1.0: version "5.1.1" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229" integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ== @@ -2935,7 +3190,7 @@ glob-to-regexp@^0.3.0: resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz#8c5a1494d2066c570cc3bfe4496175acc4d502ab" integrity sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs= -glob@^7.1.2, glob@^7.1.3, glob@^7.1.4: +glob@^7.0.3, glob@^7.1.3, glob@^7.1.4: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== @@ -2947,13 +3202,6 @@ glob@^7.1.2, glob@^7.1.3, glob@^7.1.4: once "^1.3.0" path-is-absolute "^1.0.0" -global-modules@2.0.0, global-modules@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" - integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== - dependencies: - global-prefix "^3.0.0" - global-modules@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" @@ -2963,6 +3211,13 @@ global-modules@^1.0.0: is-windows "^1.0.1" resolve-dir "^1.0.0" +global-modules@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + global-prefix@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" @@ -2983,20 +3238,27 @@ global-prefix@^3.0.0: kind-of "^6.0.2" which "^1.3.1" -globals@^11.0.1, globals@^11.1.0: +globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== +globals@^12.1.0: + version "12.4.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8" + integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== + dependencies: + type-fest "^0.8.1" + globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== -globby@^11.0.0: - version "11.0.0" - resolved 
"https://registry.yarnpkg.com/globby/-/globby-11.0.0.tgz#56fd0e9f0d4f8fb0c456f1ab0dee96e1380bc154" - integrity sha512-iuehFnR3xu5wBBtm4xi0dMe92Ob87ufyu/dHwpDYfbcpYpIbrO5OnS8M1vWvrBhSGEJ3/Ecj7gnX76P8YxpPEg== +globby@^11.0.1: + version "11.0.1" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.1.tgz#9a2bf107a068f3ffeabc49ad702c79ede8cfd357" + integrity sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ== dependencies: array-union "^2.1.0" dir-glob "^3.0.1" @@ -3005,17 +3267,16 @@ globby@^11.0.0: merge2 "^1.3.0" slash "^3.0.0" -globby@^7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/globby/-/globby-7.1.1.tgz#fb2ccff9401f8600945dfada97440cca972b8680" - integrity sha1-+yzP+UAfhgCUXfral0QMypcrhoA= +globby@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" + integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" - dir-glob "^2.0.0" - glob "^7.1.2" - ignore "^3.3.5" - pify "^3.0.0" - slash "^1.0.0" + glob "^7.0.3" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" globjoin@^0.1.4: version "0.1.4" @@ -3036,7 +3297,12 @@ good-listener@^1.2.2: dependencies: delegate "^3.1.2" -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2: + version "4.2.4" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" + integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== + +graceful-fs@^4.1.6: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== @@ -3053,18 +3319,19 @@ graphlib@^2.1.8: dependencies: lodash 
"^4.17.15" -handlebars@^4.5.3: - version "4.7.2" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.2.tgz#01127b3840156a0927058779482031afe0e730d7" - integrity sha512-4PwqDL2laXtTWZghzzCtunQUTLbo31pcCJrd/B/9JP8XbhVzpS5ZXuKqlOzsd1rtcaLo4KqAn8nl8mkknS4MHw== +handlebars@^4.7.6: + version "4.7.6" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.6.tgz#d4c05c1baf90e9945f77aa68a7a219aa4a7df74e" + integrity sha512-1f2BACcBfiwAfStCKZNrUCgqNZkGsAT7UM3kkYtXuLo0KnaVfjKOyf7PRzB6++aK9STyT1Pd2ZCPe3EGOXleXA== dependencies: + minimist "^1.2.5" neo-async "^2.6.0" - optimist "^0.6.1" source-map "^0.6.1" + wordwrap "^1.0.0" optionalDependencies: uglify-js "^3.1.4" -hard-rejection@^2.0.0: +hard-rejection@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== @@ -3130,12 +3397,13 @@ has@^1.0.3: function-bind "^1.1.1" hash-base@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" - integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= + version "3.1.0" + resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" + integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" + inherits "^2.0.4" + readable-stream "^3.6.0" + safe-buffer "^5.2.0" hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" @@ -3179,7 +3447,7 @@ html-tags@^3.1.0: resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.1.0.tgz#7b5e6f7e665e9fb41f30007ed9e0d41e97fb2140" integrity sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg== -htmlparser2@^3.10.0, htmlparser2@^3.8.2: +htmlparser2@^3.10.0: version "3.10.1" 
resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== @@ -3191,6 +3459,16 @@ htmlparser2@^3.10.0, htmlparser2@^3.8.2: inherits "^2.0.1" readable-stream "^3.1.1" +htmlparser2@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-4.1.0.tgz#9a4ef161f2e4625ebf7dfbe6c0a2f52d18a59e78" + integrity sha512-4zDq1a1zhE4gQso/c5LP1OtrhYTncXNSpvJYtWJBtXAETPlMfi3IFNjGuQbYLuVY4ZR0QMqRVvo4Pdy9KLyP8Q== + dependencies: + domelementtype "^2.0.1" + domhandler "^3.0.0" + domutils "^2.0.0" + entities "^2.0.0" + http2-client@^1.2.5: version "1.3.3" resolved "https://registry.yarnpkg.com/http2-client/-/http2-client-1.3.3.tgz#90fc15d646cca86956b156d07c83947d57d659a9" @@ -3201,7 +3479,7 @@ https-browserify@^1.0.0: resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= -iconv-lite@0.4, iconv-lite@^0.4.17: +iconv-lite@0.4: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== @@ -3237,22 +3515,22 @@ iferr@^0.1.5: resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= -ignore@^3.3.3, ignore@^3.3.5: - version "3.3.10" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043" - integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug== - -ignore@^4.0.2: +ignore@^4.0.6: version "4.0.6" resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" integrity 
sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== +ignore@^5.1.1, ignore@^5.1.8: + version "5.1.8" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" + integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== + ignore@^5.1.4: version "5.1.4" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.4.tgz#84b7b3dbe64552b6ef0eca99f6743dbec6d97adf" integrity sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A== -import-fresh@^3.1.0: +import-fresh@^3.0.0, import-fresh@^3.1.0: version "3.2.1" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.2.1.tgz#633ff618506e793af5ac91bf48b72677e15cbe66" integrity sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ== @@ -3265,7 +3543,7 @@ import-lazy@^4.0.0: resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-4.0.0.tgz#e8eb627483a0a43da3c03f3e35548be5cb0cc153" integrity sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw== -import-local@2.0.0: +import-local@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== @@ -3273,13 +3551,15 @@ import-local@2.0.0: pkg-dir "^3.0.0" resolve-cwd "^2.0.0" -imports-loader@^0.8.0: - version "0.8.0" - resolved "https://registry.yarnpkg.com/imports-loader/-/imports-loader-0.8.0.tgz#030ea51b8ca05977c40a3abfd9b4088fe0be9a69" - integrity sha512-kXWL7Scp8KQ4552ZcdVTeaQCZSLW+e6nJfp3cwUMB673T7Hr98Xjx5JK+ql7ADlJUvj1JS5O01RLbKoutN5QDQ== +imports-loader@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/imports-loader/-/imports-loader-1.1.0.tgz#1c3a388d0c5cd7f9eb08f3646d4aae3b70e57933" + integrity 
sha512-HcPM6rULdQ6EBLVq+5O+CF9xb7qiUjsRm6V28bTG/c3IU5sQkVZzUDwYY0r4jHvSAmVFdO9WA/vLAURR5WQSeQ== dependencies: - loader-utils "^1.0.2" + loader-utils "^2.0.0" + schema-utils "^2.7.0" source-map "^0.6.1" + strip-comments "^2.0.1" imurmurhash@^0.1.4: version "0.1.4" @@ -3296,7 +3576,7 @@ indexes-of@^1.0.1: resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= -infer-owner@^1.0.3: +infer-owner@^1.0.3, infer-owner@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== @@ -3309,7 +3589,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3: +inherits@2, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -3329,32 +3609,12 @@ ini@^1.3.4, ini@^1.3.5: resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== -inquirer@^3.0.6: - version "3.3.0" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.3.0.tgz#9dd2f2ad765dcab1ff0443b491442a20ba227dc9" - integrity sha512-h+xtnyk4EwKvFWHrUYsWErEVR+igKtLdchu+o0Z1RL7VU/jVMFbYir2bp6bAj8efFNxWqHX0dIss6fJQ+/+qeQ== - dependencies: - ansi-escapes "^3.0.0" - chalk "^2.0.0" - cli-cursor "^2.1.0" - cli-width "^2.0.0" - external-editor "^2.0.4" - figures "^2.0.0" - lodash "^4.3.0" - mute-stream "0.0.7" - run-async "^2.2.0" - 
rx-lite "^4.0.8" - rx-lite-aggregates "^4.0.8" - string-width "^2.1.0" - strip-ansi "^4.0.0" - through "^2.3.6" - -interpret@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" - integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== +interpret@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" + integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== -invariant@^2.2.0, invariant@^2.2.2: +invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== @@ -3410,6 +3670,13 @@ is-binary-path@^1.0.0: dependencies: binary-extensions "^1.0.0" +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" @@ -3425,6 +3692,11 @@ is-callable@^1.1.4: resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== +is-callable@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.0.tgz#83336560b54a38e35e3a2df7afd0454d691468bb" + integrity 
sha512-pyVD9AaGLxtg6srb2Ng6ynWJqkHU9bEM087AKck0w8QwDarTfNcpIYoU8x8Hv2Icm8u6kFJM18Dag8lyqGkviw== + is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" @@ -3515,7 +3787,7 @@ is-glob@^3.1.0: dependencies: is-extglob "^2.1.0" -is-glob@^4.0.0, is-glob@^4.0.1: +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== @@ -3539,7 +3811,26 @@ is-number@^7.0.0: resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== -is-plain-obj@^1.1.0: +is-path-cwd@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" + integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== + +is-path-in-cwd@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" + integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== + dependencies: + is-path-inside "^2.1.0" + +is-path-inside@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" + integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== + dependencies: + path-is-inside "^1.0.2" + +is-plain-obj@^1.0.0, is-plain-obj@^1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= @@ -3556,11 +3847,6 @@ is-plain-object@^2.0.3, is-plain-object@^2.0.4: dependencies: isobject "^3.0.1" -is-promise@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" - integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= - is-regex@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.5.tgz#39d589a358bf18967f726967120b8fc1aed74eae" @@ -3568,21 +3854,28 @@ is-regex@^1.0.4: dependencies: has "^1.0.3" +is-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.0.tgz#ece38e389e490df0dc21caea2bd596f987f767ff" + integrity sha512-iI97M8KTWID2la5uYXlkbSDQIg4F6o1sYboZKKTDpnDQMLtUL86zxhgDet3Q2SriaYsyGqZ6Mn2SjbRKeLHdqw== + dependencies: + has-symbols "^1.0.1" + is-regexp@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-2.1.0.tgz#cd734a56864e23b956bf4e7c66c396a4c0b22c2d" integrity sha512-OZ4IlER3zmRIoB9AqNhEggVxqIH4ofDns5nRrPS6yQxXE1TPCUpFznBfRQmQa8uC+pXqjMnukiJBxCisIxiLGA== -is-resolvable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" - integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== - is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= +is-string@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.5.tgz#40493ed198ef3ff477b8c7f92f644ec82a5cd3a6" + integrity sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ== + is-symbol@^1.0.2: version "1.0.3" resolved 
"https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" @@ -3642,25 +3935,17 @@ jquery@>=1.7, jquery@>=3.4.0, "jquery@^1.8.3 || ^2.0 || ^3.0": resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.1.tgz#714f1f8d9dde4bdfa55764ba37ef214630d80ef2" integrity sha512-36+AdBzCL+y6qjw5Tx7HgzeGCzC81MDDgaUP8ld2zhx58HdqXGoBd+tHdrBMiyjGQs0Hxs/MLZTu/eHNJJuWPw== -js-tokens@^3.0.0, js-tokens@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" - integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= - "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@>=3.13.1, js-yaml@^3.9.1: - version "3.13.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" - integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" +js-tokens@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" + integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= -js-yaml@^3.12.1: +js-yaml@^3.12.1, js-yaml@^3.13.1, js-yaml@^3.14.0: version "3.14.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482" integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A== @@ -3754,9 +4039,9 @@ jsonfile@^4.0.0: graceful-fs "^4.1.6" jsonpointer@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-4.0.1.tgz#4fd92cb34e0e9db3c89c8622ecf51f9b978c6cb9" - integrity sha1-T9kss04OnbPInIYi7PUfm5eMbLk= + 
version "4.1.0" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-4.1.0.tgz#501fb89986a2389765ba09e6053299ceb4f2c2cc" + integrity sha512-CXcRvMyTlnR53xMcKnuMzfCA5i/nfblTnnr74CZb6C4vG39eu6w51t7nKmU5MfLfbTgGItliNyjO/ciNPDqClg== kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" @@ -3777,15 +4062,15 @@ kind-of@^5.0.0: resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== -kind-of@^6.0.0, kind-of@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" - integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== +kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== -known-css-properties@^0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/known-css-properties/-/known-css-properties-0.18.0.tgz#d6e00b56ee1d5b0d171fd86df1583cfb012c521f" - integrity sha512-69AgJ1rQa7VvUsd2kpvVq+VeObDuo3zrj0CzM5Slmf6yduQFAI2kXPDQJR2IE/u6MSAUOJrwSzjg5vlz8qcMiw== +known-css-properties@^0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/known-css-properties/-/known-css-properties-0.19.0.tgz#5d92b7fa16c72d971bda9b7fe295bdf61836ee5b" + integrity sha512-eYboRV94Vco725nKMlpkn3nV2+96p9c3gKXRsYqAJSswSENvBhN7n5L+uDhY58xQa0UukWsDMTGELzmD8Q+wTA== lcid@^2.0.0: version "2.0.0" @@ -3799,13 +4084,13 @@ leven@^3.1.0: resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== 
-levn@^0.3.0, levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" + prelude-ls "^1.2.1" + type-check "~0.4.0" lines-and-columns@^1.1.6: version "1.1.6" @@ -3827,15 +4112,6 @@ loader-runner@^2.4.0: resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== -loader-utils@1.2.3, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" - integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== - dependencies: - big.js "^5.2.2" - emojis-list "^2.0.0" - json5 "^1.0.1" - loader-utils@^0.2.16: version "0.2.17" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" @@ -3846,6 +4122,33 @@ loader-utils@^0.2.16: json5 "^0.5.0" object-assign "^4.0.1" +loader-utils@^1.1.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" + integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== + dependencies: + big.js "^5.2.2" + emojis-list "^2.0.0" + json5 "^1.0.1" + +loader-utils@^1.2.3, loader-utils@^1.4.0: + version "1.4.0" + resolved 
"https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613" + integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^1.0.1" + +loader-utils@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.0.tgz#e4cace5b816d425a166b5f097e10cd12b36064b0" + integrity sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + locate-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" @@ -3874,11 +4177,16 @@ lodash.difference@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.difference/-/lodash.difference-4.5.0.tgz#9ccb4e505d486b91651345772885a2df27fd017c" integrity sha1-nMtOUF1Ia5FlE0V3KIWi3yf9AXw= -"lodash@>=3.5 <5", lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0: +"lodash@>=3.5 <5", lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.4, lodash@^4.3.0: version "4.17.15" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== +lodash@^4.17.13, lodash@^4.17.19: + version "4.17.19" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" + integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== + log-symbols@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" @@ -3886,12 +4194,12 @@ log-symbols@^2.2.0: dependencies: chalk "^2.0.1" 
-log-symbols@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-3.0.0.tgz#f3a08516a5dea893336a7dee14d18a1cfdab77c4" - integrity sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ== +log-symbols@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.0.0.tgz#69b3cc46d20f448eccdb75ea1fa733d9e821c920" + integrity sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA== dependencies: - chalk "^2.4.2" + chalk "^4.0.0" longest-streak@^2.0.1: version "2.0.4" @@ -3905,14 +4213,6 @@ loose-envify@^1.0.0, loose-envify@^1.4.0: dependencies: js-tokens "^3.0.0 || ^4.0.0" -lru-cache@^4.0.1: - version "4.1.5" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" - integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -3920,18 +4220,18 @@ lru-cache@^5.1.1: dependencies: yallist "^3.0.2" +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + lunr@2.3.8: version "2.3.8" resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.8.tgz#a8b89c31f30b5a044b97d2d28e2da191b6ba2072" integrity sha512-oxMeX/Y35PNFuZoHp+jUj5OSEmLCaIH4KTFJh7a93cHBoFmpw2IoPs22VIz7vyO2YUnx2Tn9dzIwO2P/4quIRg== -make-dir@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" - integrity 
sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== - dependencies: - pify "^3.0.0" - make-dir@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" @@ -3940,6 +4240,13 @@ make-dir@^2.0.0: pify "^4.0.1" semver "^5.6.0" +make-dir@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + map-age-cleaner@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" @@ -4026,7 +4333,7 @@ memoize-one@~5.1.1: resolved "https://registry.yarnpkg.com/memoize-one/-/memoize-one-5.1.1.tgz#047b6e3199b508eaec03504de71229b8eb1d75c0" integrity sha512-HKeeBpWvqiVJD57ZUAsJNm71eHTykffzcLZVYWiVfQeI1rJtuEaS7hQiEpWfVVk18donPwJEcFKIkCmPJNOhHA== -memory-fs@^0.4.0, memory-fs@^0.4.1: +memory-fs@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= @@ -4042,22 +4349,24 @@ memory-fs@^0.5.0: errno "^0.1.3" readable-stream "^2.0.1" -meow@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/meow/-/meow-6.1.0.tgz#4ff4641818d3502afcddc631f94cb6971a581cb3" - integrity sha512-iIAoeI01v6pmSfObAAWFoITAA4GgiT45m4SmJgoxtZfvI0fyZwhV4d0lTwiUXvAKIPlma05Feb2Xngl52Mj5Cg== +meow@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/meow/-/meow-7.0.1.tgz#1ed4a0a50b3844b451369c48362eb0515f04c1dc" + integrity sha512-tBKIQqVrAHqwit0vfuFPY3LlzJYkEOFyKa3bPgxzNl6q/RtN8KQ+ALYEASYuFayzSAsjlhXj/JZ10rH85Q6TUw== dependencies: "@types/minimist" "^1.2.0" - camelcase-keys "^6.1.1" + arrify "^2.0.1" + camelcase "^6.0.0" + camelcase-keys "^6.2.2" 
decamelize-keys "^1.1.0" - hard-rejection "^2.0.0" - minimist-options "^4.0.1" + hard-rejection "^2.1.0" + minimist-options "^4.0.2" normalize-package-data "^2.5.0" - read-pkg-up "^7.0.0" + read-pkg-up "^7.0.1" redent "^3.0.0" trim-newlines "^3.0.0" - type-fest "^0.8.1" - yargs-parser "^18.1.1" + type-fest "^0.13.1" + yargs-parser "^18.1.3" merge2@^1.3.0: version "1.3.0" @@ -4099,15 +4408,17 @@ miller-rabin@^4.0.0: bn.js "^4.0.0" brorand "^1.0.1" -mime@^2.0.3: - version "2.4.4" - resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" - integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== +mime-db@1.44.0: + version "1.44.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92" + integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg== -mimic-fn@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" - integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== +mime-types@^2.1.26: + version "2.1.27" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f" + integrity sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w== + dependencies: + mime-db "1.44.0" mimic-fn@^2.0.0: version "2.1.0" @@ -4119,12 +4430,13 @@ min-indent@^1.0.0: resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.0.tgz#cfc45c37e9ec0d8f0a0ec3dd4ef7f7c3abe39256" integrity sha1-z8RcN+nsDY8KDsPdTvf3w6vjklY= -mini-css-extract-plugin@^0.4.1: - version "0.4.5" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.4.5.tgz#c99e9e78d54f3fa775633aee5933aeaa4e80719a" - integrity 
sha512-dqBanNfktnp2hwL2YguV9Jh91PFX7gu7nRLs4TGsbAfAG6WOtlynFRYzwDwmmeSb5uIwHo9nx1ta0f7vAZVp2w== +mini-css-extract-plugin@0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.9.0.tgz#47f2cf07aa165ab35733b1fc97d4c46c0564339e" + integrity sha512-lp3GeY7ygcgAmVIcRPBVhIkf8Us7FZjA+ILpal44qLdSu11wmjKQ3d9k15lfD7pO4esu9eUIAW7qiYIBppv40A== dependencies: loader-utils "^1.1.0" + normalize-url "1.9.1" schema-utils "^1.0.0" webpack-sources "^1.1.0" @@ -4138,40 +4450,62 @@ minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= -minimatch@^3.0.2, minimatch@^3.0.4: +minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" -minimist-options@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.0.2.tgz#29c4021373ded40d546186725e57761e4b1984a7" - integrity sha512-seq4hpWkYSUh1y7NXxzucwAN9yVlBc3Upgdjz8vLCP97jG8kaOmzYrVH/m7tQ1NYD1wdtZbSLfdy4zFmRWuc/w== +minimist-options@^4.0.2: + version "4.1.0" + resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" + integrity sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A== dependencies: arrify "^1.0.1" is-plain-obj "^1.1.0" + kind-of "^6.0.3" -minimist@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" - integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= - -minimist@^1.2.0: - version "1.2.0" - resolved 
"https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" - integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= - -minimist@^1.2.5: +minimist@^1.2.0, minimist@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== -minimist@~0.0.1: - version "0.0.10" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" - integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= +minipass-collect@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617" + integrity sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA== + dependencies: + minipass "^3.0.0" + +minipass-flush@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373" + integrity sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw== + dependencies: + minipass "^3.0.0" + +minipass-pipeline@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.3.tgz#55f7839307d74859d6e8ada9c3ebe72cec216a34" + integrity sha512-cFOknTvng5vqnwOpDsZTWhNll6Jf8o2x+/diplafmxpuIymAjzoOolZG0VvQf3V2HgqzJNhnuKHYp2BqDgz8IQ== + dependencies: + minipass "^3.0.0" + +minipass@^3.0.0, minipass@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd" + integrity sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg== + dependencies: + yallist "^4.0.0" + +minizlib@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.0.tgz#fd52c645301ef09a63a2c209697c294c6ce02cf3" + integrity sha512-EzTZN/fjSvifSX0SlqUERCN39o6T40AMarPbv0MrarSFtIITCBh7bi+dU8nxGFHuqs9jdIAeoYoKuQAAASsPPA== + dependencies: + minipass "^3.0.0" + yallist "^4.0.0" mississippi@^3.0.0: version "3.0.0" @@ -4197,20 +4531,18 @@ mixin-deep@^1.2.0: for-in "^1.0.2" is-extendable "^1.0.1" -mkdirp@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" - integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= - dependencies: - minimist "0.0.8" - -mkdirp@^0.5.3: - version "0.5.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.4.tgz#fd01504a6797ec5c9be81ff43d204961ed64a512" - integrity sha512-iG9AK/dJLtJ0XNgTuDbSyNS3zECqDlAhnQW4CsNxBG3LQJBbHmRX1egw39DmtOdCAqY+dKXV+sgPgilNWUKMVw== +mkdirp@^0.5.1, mkdirp@^0.5.3: + version "0.5.5" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" + integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" +mkdirp@^1.0.3, mkdirp@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + mobx-react-lite@^1.4.2: version "1.5.2" resolved "https://registry.yarnpkg.com/mobx-react-lite/-/mobx-react-lite-1.5.2.tgz#c4395b0568b9cb16f07669d8869cc4efa1b8656d" @@ -4271,15 +4603,10 @@ ms@^2.1.1: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -mute-stream@0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" - integrity 
sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s= - nan@^2.12.1: - version "2.14.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" - integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== + version "2.14.1" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.1.tgz#d7be34dfa3105b91494c3147089315eff8874b01" + integrity sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw== nanomatch@^1.2.9: version "1.2.13" @@ -4303,7 +4630,12 @@ natural-compare@^1.4.0: resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= -neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1: +neo-async@^2.5.0, neo-async@^2.6.1: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +neo-async@^2.6.0: version "2.6.1" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== @@ -4356,10 +4688,17 @@ node-readfiles@^0.2.0: dependencies: es6-promise "^3.2.1" -node-releases@^1.1.53: - version "1.1.53" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.53.tgz#2d821bfa499ed7c5dffc5e2f28c88e78a08ee3f4" - integrity sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ== +node-releases@^1.1.58: + version "1.1.60" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.60.tgz#6948bdfce8286f0b5d0e5a88e8384e954dfe7084" + integrity sha512-gsO4vjEdQaTusZAEebUWp2a5d7dF5DYoIpDG7WySnk7BuZDW+GPpHXoXXuYawRBr/9t5q54tirPz79kFIWg4dA== + +nopt@~1.0.10: 
+ version "1.0.10" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee" + integrity sha1-bd0hvSoxQXuScn3Vhfim83YI6+4= + dependencies: + abbrev "1" normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: version "2.5.0" @@ -4378,7 +4717,7 @@ normalize-path@^2.1.1: dependencies: remove-trailing-separator "^1.0.1" -normalize-path@^3.0.0: +normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== @@ -4393,6 +4732,16 @@ normalize-selector@^0.2.0: resolved "https://registry.yarnpkg.com/normalize-selector/-/normalize-selector-0.2.0.tgz#d0b145eb691189c63a78d201dc4fdb1293ef0c03" integrity sha1-0LFF62kRicY6eNIB3E/bEpPvDAM= +normalize-url@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-1.9.1.tgz#2cc0d66b31ea23036458436e3620d85954c66c3c" + integrity sha1-LMDWazHqIwNkWENuNiDYWVTGbDw= + dependencies: + object-assign "^4.0.1" + prepend-http "^1.0.0" + query-string "^4.1.0" + sort-keys "^1.0.0" + npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" @@ -4423,21 +4772,21 @@ oas-kit-common@^1.0.7, oas-kit-common@^1.0.8: fast-safe-stringify "^2.0.7" oas-linter@^3.1.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/oas-linter/-/oas-linter-3.1.2.tgz#ff5eae63290cd2c0ef195bae9bcf5e91b6884c19" - integrity sha512-mv3HBG9aQz8PLGvonewIN9Y2Ra8QL6jvotRvf7NCdZ20n5vg4dO4y61UZh6s+KRDfJaU1PO+9Oxrn3EUN4Xygw== + version "3.1.3" + resolved "https://registry.yarnpkg.com/oas-linter/-/oas-linter-3.1.3.tgz#1526b3da32a1bbf124d720f27fd4eb9971cebfff" + integrity sha512-jFWBHjSoqODGo7cKA/VWqqWSLbHNtnyCEpa2nMMS64SzCUbZDk63Oe7LqQZ2qJA0K2VRreYLt6cVkYy6MqNRDg== 
dependencies: should "^13.2.1" yaml "^1.8.3" oas-resolver@^2.3.0: - version "2.3.2" - resolved "https://registry.yarnpkg.com/oas-resolver/-/oas-resolver-2.3.2.tgz#990a722ddcafe9b0b85893362963b829190b236f" - integrity sha512-toGCUv8wyZZmUAAsw4jn+511xNpUFW2ZLp4sAZ7xpERIeosrbxBxtkVxot9kXvdUHtPjRafi5+bkJ56TwQeYSQ== + version "2.4.1" + resolved "https://registry.yarnpkg.com/oas-resolver/-/oas-resolver-2.4.1.tgz#46948226f73e514ac6733f166cc559e800e4389b" + integrity sha512-rRmUv9mDTKPtsB2OGaoNMK4BC1Q/pL+tWRPKRjXJEBoLmfegJhecOZPBtIR0gKEVQb9iAA0MqulkgY43EiCFDg== dependencies: node-fetch-h2 "^2.3.0" oas-kit-common "^1.0.8" - reftools "^1.1.1" + reftools "^1.1.3" yaml "^1.8.3" yargs "^15.3.1" @@ -4462,7 +4811,7 @@ oas-validator@^3.4.0: should "^13.2.1" yaml "^1.8.3" -object-assign@^4.0.1, object-assign@^4.1.1: +object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= @@ -4513,6 +4862,15 @@ object.entries@^1.1.0: function-bind "^1.1.1" has "^1.0.3" +object.entries@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.2.tgz#bc73f00acb6b6bb16c203434b10f9a7e797d3add" + integrity sha512-BQdB9qKmb/HyNdMNWVr7O3+z5MUIx3aiegEIJqjMBbBf0YT9RRxTJSim4mzFqtyr7PDAHigq0N9dO0m0tRakQA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.5" + has "^1.0.3" + object.pick@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" @@ -4520,7 +4878,7 @@ object.pick@^1.3.0: dependencies: isobject "^3.0.1" -object.values@^1.1.0: +object.values@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.1.tgz#68a99ecde356b7e9295a3c5e0ce31dc8c953de5e" integrity 
sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA== @@ -4533,16 +4891,9 @@ object.values@^1.1.0: once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= - dependencies: - wrappy "1" - -onetime@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" - integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: - mimic-fn "^1.0.0" + wrappy "1" ono@^4.0.11: version "4.0.11" @@ -4551,32 +4902,24 @@ ono@^4.0.11: dependencies: format-util "^1.0.3" -openapi-sampler@1.0.0-beta.15: - version "1.0.0-beta.15" - resolved "https://registry.yarnpkg.com/openapi-sampler/-/openapi-sampler-1.0.0-beta.15.tgz#c087143826962fa07a0c7bda9ce5c36d732f45de" - integrity sha512-wUD/vD3iBHKik/sME3uwUu4X3HFA53rDrPcVvLzgEELjHLbnTpSYfm4Jo9qZT1dPfBRowAnrF/VRQfOjL5QRAw== +openapi-sampler@^1.0.0-beta.16: + version "1.0.0-beta.16" + resolved "https://registry.yarnpkg.com/openapi-sampler/-/openapi-sampler-1.0.0-beta.16.tgz#7813524d5b88d222efb772ceb5a809075d6d9174" + integrity sha512-05+GvwMagTY7GxoDQoWJfmAUFlxfebciiEzqKmu4iq6+MqBEn62AMUkn0CTxyKhnUGIaR2KXjTeslxIeJwVIOw== dependencies: json-pointer "^0.6.0" -optimist@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" - integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= - dependencies: - minimist "~0.0.1" - wordwrap "~0.0.2" - -optionator@^0.8.2: - version "0.8.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" - integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== +optionator@^0.9.1: + version "0.9.1" + resolved 
"https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" os-browserify@^0.3.0: version "0.3.0" @@ -4588,7 +4931,7 @@ os-homedir@^1.0.0: resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= -os-locale@^3.0.0, os-locale@^3.1.0: +os-locale@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== @@ -4597,7 +4940,7 @@ os-locale@^3.0.0, os-locale@^3.1.0: lcid "^2.0.0" mem "^4.0.0" -os-tmpdir@^1.0.1, os-tmpdir@~1.0.2: +os-tmpdir@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= @@ -4624,24 +4967,17 @@ p-limit@^1.1.0: dependencies: p-try "^1.0.0" -p-limit@^2.0.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" - integrity sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== - dependencies: - p-try "^2.0.0" - -p-limit@^2.2.0: +p-limit@^2.0.0, p-limit@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" 
-p-limit@^2.2.1: - version "2.2.2" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.2.tgz#61279b67721f5287aa1c13a9a7fbbc48c9291b1e" - integrity sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ== +p-limit@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.0.2.tgz#1664e010af3cadc681baafd3e2a437be7b0fb5fe" + integrity sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg== dependencies: p-try "^2.0.0" @@ -4666,6 +5002,18 @@ p-locate@^4.1.0: dependencies: p-limit "^2.2.0" +p-map@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" + integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== + +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" + p-try@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" @@ -4697,7 +5045,7 @@ parent-module@^1.0.0: dependencies: callsites "^3.0.0" -parse-asn1@^5.0.0: +parse-asn1@^5.0.0, parse-asn1@^5.1.5: version "5.1.5" resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== @@ -4783,6 +5131,11 @@ path-key@^2.0.0, path-key@^2.0.1: resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= +path-key@^3.1.0: + version "3.1.1" + resolved 
"https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + path-parse@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" @@ -4795,22 +5148,15 @@ path-type@^2.0.0: dependencies: pify "^2.0.0" -path-type@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" - integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== - dependencies: - pify "^3.0.0" - path-type@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== pbkdf2@^3.0.3: - version "3.0.17" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" - integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== + version "3.1.1" + resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.1.tgz#cb8724b0fada984596856d1a6ebafd3584654b94" + integrity sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" @@ -4823,7 +5169,7 @@ perfect-scrollbar@^1.4.0: resolved "https://registry.yarnpkg.com/perfect-scrollbar/-/perfect-scrollbar-1.5.0.tgz#821d224ed8ff61990c23f26db63048cdc75b6b83" integrity sha512-NrNHJn5mUGupSiheBTy6x+6SXCFbLlm8fVZh9moIzw/LgqElN5q4ncR4pbCBCYuCJ8Kcl9mYM0NgDxvW+b4LxA== -picomatch@^2.0.5, picomatch@^2.2.1: +picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1: version "2.2.2" resolved 
"https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad" integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== @@ -4833,16 +5179,23 @@ pify@^2.0.0: resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= - pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== +pinkie-promise@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= + dependencies: + pinkie "^2.0.0" + +pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= + pkg-dir@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" @@ -4857,17 +5210,12 @@ pkg-dir@^3.0.0: dependencies: find-up "^3.0.0" -pkg-up@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-2.0.0.tgz#c819ac728059a461cab1c3889a2be3c49a004d7f" - integrity sha1-yBmscoBZpGHKscOImivjxJoATX8= +pkg-dir@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== dependencies: - find-up "^2.1.0" - -pluralize@^7.0.0: - version "7.0.0" - resolved 
"https://registry.yarnpkg.com/pluralize/-/pluralize-7.0.0.tgz#298b89df8b93b0221dbf421ad2b1b1ea23fc6777" - integrity sha512-ARhBOdzS3e41FbkW/XWrTEtukqqLoK5+Z/4UeDaLuSW+39JPeFgs4gCGqsrJHVZX0fUrx//4OF0K1CUGwlIFow== + find-up "^4.0.0" polished@^3.4.4: version "3.6.5" @@ -4949,10 +5297,10 @@ postcss-modules-scope@^1.0.0: css-selector-tokenizer "^0.7.0" postcss "^6.0.1" -postcss-modules-scope@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.1.1.tgz#33d4fc946602eb5e9355c4165d68a10727689dba" - integrity sha512-OXRUPecnHCg8b9xWvldG/jUpRIGPNRka0r4D4j0ESUU2/5IOnpsjfPPmDprM3Ih8CgZ8FXjWqaniK5v4rWt3oQ== +postcss-modules-scope@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.2.0.tgz#385cae013cc7743f5a7d7602d1073a89eaae62ee" + integrity sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ== dependencies: postcss "^7.0.6" postcss-selector-parser "^6.0.0" @@ -5003,12 +5351,12 @@ postcss-sass@^0.4.4: gonzales-pe "^4.3.0" postcss "^7.0.21" -postcss-scss@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/postcss-scss/-/postcss-scss-2.0.0.tgz#248b0a28af77ea7b32b1011aba0f738bda27dea1" - integrity sha512-um9zdGKaDZirMm+kZFKKVsnKPF7zF7qBAtIfTSnZXD1jZ0JNZIxdB6TxQOjCnlSzLRInVl2v3YdBh/M881C4ug== +postcss-scss@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/postcss-scss/-/postcss-scss-2.1.1.tgz#ec3a75fa29a55e016b90bf3269026c53c1d2b383" + integrity sha512-jQmGnj0hSGLd9RscFw9LyuSVAa5Bl1/KBPqG1NQw9w8ND55nY4ZEsdlVuYJvLPpV+y0nwTV5v/4rHPzZRihQbA== dependencies: - postcss "^7.0.0" + postcss "^7.0.6" postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2: version "6.0.2" @@ -5024,15 +5372,10 @@ postcss-syntax@^0.36.2: resolved "https://registry.yarnpkg.com/postcss-syntax/-/postcss-syntax-0.36.2.tgz#f08578c7d95834574e5593a82dfbfa8afae3b51c" integrity 
sha512-nBRg/i7E3SOHWxF3PpF5WnJM/jQ1YpY9000OaVXlAQj6Zp/kIqJxEDWIZ67tAd7NLuk7zqN4yqe9nc0oNAOs1w== -postcss-value-parser@^4.0.0, postcss-value-parser@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9" - integrity sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ== - -postcss-value-parser@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.3.tgz#651ff4593aa9eda8d5d0d66593a2417aeaeb325d" - integrity sha512-N7h4pG+Nnu5BEIzyeaaIYWs0LI5XC40OrRh5L60z0QjFsqGWcHcbkBvpe1WYpcIS9yQ8sOi/vIPt1ejQCrMVrg== +postcss-value-parser@^4.0.0, postcss-value-parser@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz#443f6a20ced6481a2bda4fa8532a6e55d789a2cb" + integrity sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ== postcss@^6.0.1, postcss@^6.0.2: version "6.0.23" @@ -5043,28 +5386,33 @@ postcss@^6.0.1, postcss@^6.0.2: source-map "^0.6.1" supports-color "^5.4.0" -postcss@^7.0.0, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.26, postcss@^7.0.27, postcss@^7.0.7: - version "7.0.27" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.27.tgz#cc67cdc6b0daa375105b7c424a85567345fc54d9" - integrity sha512-WuQETPMcW9Uf1/22HWUWP9lgsIC+KEHg2kozMflKjbeUtw9ujvFX6QmIfozaErDkmLWS9WEnEdEe6Uo9/BNTdQ== +postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.32, postcss@^7.0.5, postcss@^7.0.6: + version "7.0.32" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.32.tgz#4310d6ee347053da3433db2be492883d62cec59d" + integrity sha512-03eXong5NLnNCD05xscnGKGDZ98CyzoqPSMjOe6SuoQY7Z2hIj0Ld1g/O/UQRuOle2aRtiIRDg9tDcTGAkLfKw== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" -postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.23, postcss@^7.0.5, 
postcss@^7.0.6: - version "7.0.26" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.26.tgz#5ed615cfcab35ba9bbb82414a4fa88ea10429587" - integrity sha512-IY4oRjpXWYshuTDFxMVkJDtWIk2LhsTlu8bZnbEJA4+bYT16Lvpo8Qv6EvDumhYRgzjZl489pmsY3qVgJQ08nA== +postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.26, postcss@^7.0.7: + version "7.0.27" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.27.tgz#cc67cdc6b0daa375105b7c424a85567345fc54d9" + integrity sha512-WuQETPMcW9Uf1/22HWUWP9lgsIC+KEHg2kozMflKjbeUtw9ujvFX6QmIfozaErDkmLWS9WEnEdEe6Uo9/BNTdQ== dependencies: chalk "^2.4.2" source-map "^0.6.1" supports-color "^6.1.0" -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prepend-http@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" + integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= prismjs@^1.19.0: version "1.20.0" @@ -5112,11 +5460,6 @@ prr@~1.0.1: resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= -pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= - public-encrypt@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" @@ -5169,6 +5512,14 @@ punycode@^2.1.0: resolved 
"https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +query-string@^4.1.0: + version "4.3.4" + resolved "https://registry.yarnpkg.com/query-string/-/query-string-4.3.4.tgz#bbb693b9ca915c232515b228b1a02b609043dbeb" + integrity sha1-u7aTucqRXCMlFbIosaArYJBD2+s= + dependencies: + object-assign "^4.1.0" + strict-uri-encode "^1.0.0" + querystring-es3@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" @@ -5184,7 +5535,7 @@ quick-lru@^4.0.1: resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f" integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g== -randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: +randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== @@ -5199,12 +5550,12 @@ randomfill@^1.0.3: randombytes "^2.0.5" safe-buffer "^5.1.0" -react-dropdown@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/react-dropdown/-/react-dropdown-1.7.0.tgz#20287aafabdece49a6595ebe40e3fa1a37c26456" - integrity sha512-zFZ73pgLA32hArpE4j/7DtOEhOMg240XG5QvbAb0/VinGekkHDVIakMyAFUKC5jDz8jqXEltgriqFW9R5iCtPQ== +react-dropdown-aria@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/react-dropdown-aria/-/react-dropdown-aria-2.0.6.tgz#40cec5edd97a591d2f29e8c05aa8c53230e2aa6e" + integrity sha512-/9NlFopChlSKmuGL2P6S3oDwl9ddXcbNLnd1a7POov4f5/oGtSc3qBFmS4wH5xmLJe/38MhPOKF3e2q3laRi1g== dependencies: - classnames "^2.2.3" + emotion "^9.2.6" 
react-is@^16.8.1: version "16.13.1" @@ -5227,7 +5578,7 @@ read-pkg-up@^2.0.0: find-up "^2.0.0" read-pkg "^2.0.0" -read-pkg-up@^7.0.0: +read-pkg-up@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== @@ -5255,20 +5606,7 @@ read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.6, readable-stream@~2.3.6: - version "2.3.6" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" - integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^2.0.2, readable-stream@^2.3.3: +"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== @@ -5290,6 +5628,15 @@ readable-stream@^3.1.1: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits 
"^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" @@ -5299,6 +5646,13 @@ readdirp@^2.2.1: micromatch "^3.1.10" readable-stream "^2.0.2" +readdirp@~3.4.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.4.0.tgz#9fdccdf9e9155805449221ac645e8303ab5b9ada" + integrity sha512-0xe001vZBnJEK+uKcj8qOhyAKPzIT+gStxWr3LCB0DwcXR5NZJ3IaC+yGnHCYzB/S7ov3m3EEbZI2zeNvX+hGQ== + dependencies: + picomatch "^2.2.1" + redent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" @@ -5308,9 +5662,9 @@ redent@^3.0.0: strip-indent "^3.0.0" redoc@^2.0.0-rc.30: - version "2.0.0-rc.30" - resolved "https://registry.yarnpkg.com/redoc/-/redoc-2.0.0-rc.30.tgz#1b0fa646123656d648c4b983bad045b32e01fc32" - integrity sha512-HlaBKzRkNRyaQYhKOsT2ExG04ueG+mx7BdehDIswZkXRP9qmxCcDZOovuU2uUnA1JZcK3IOvV9MjAfnH4a3x8w== + version "2.0.0-rc.33" + resolved "https://registry.yarnpkg.com/redoc/-/redoc-2.0.0-rc.33.tgz#df43f533bb0cc283cc209d69d2a91404a24bd8d1" + integrity sha512-1KLdnOU1aBIddgNBcEIU29h3VqXoTT493gT5hjyHg6sE91x9qEVWPYM2A+eETQFz5ygTwkBCp6xZDxVs+HIA9w== dependencies: "@types/node" "^13.11.1" classnames "^2.2.6" @@ -5324,12 +5678,12 @@ redoc@^2.0.0-rc.30: marked "^0.7.0" memoize-one "~5.1.1" mobx-react "6.1.5" - openapi-sampler "1.0.0-beta.15" + openapi-sampler "^1.0.0-beta.16" perfect-scrollbar "^1.4.0" polished "^3.4.4" prismjs "^1.19.0" prop-types "^15.7.2" - react-dropdown "^1.7.0" + react-dropdown-aria "^2.0.6" react-tabs "^3.1.0" slugify "^1.4.0" stickyfill "^1.1.1" @@ -5337,10 +5691,10 @@ redoc@^2.0.0-rc.30: tslib "^1.11.1" url-template "^2.0.8" -reftools@^1.1.0, reftools@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/reftools/-/reftools-1.1.1.tgz#09cff48f62c09cae40bd1fe9683f5412d2a4656d" - integrity 
sha512-7ySkzK7YpUeJP16rzJqEXTZ7IrAq/AL/p+wWejD9wdKQOe+mYYVAOB3w5ZTs2eoHfmAidwr/6PcC+q+LzPF/DQ== +reftools@^1.1.0, reftools@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/reftools/-/reftools-1.1.3.tgz#f430d11677d81ae97b8dbb3836713bb52b1cd0a7" + integrity sha512-JTlhKmSzqE/gt5Z5RX25yZDq67MlRRtTz1gLy/NY+wPDx1e1vEJsv1PoNrpKZBwitcEMXs2k7pzmbmraP1ZMAQ== regenerate@^1.2.1: version "1.4.0" @@ -5358,9 +5712,9 @@ regenerator-runtime@^0.11.0: integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.4: - version "0.13.5" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz#d878a1d094b4306d10b9096484b33ebd55e26697" - integrity sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA== + version "0.13.7" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55" + integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew== regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" @@ -5370,15 +5724,10 @@ regex-not@^1.0.0, regex-not@^1.0.2: extend-shallow "^3.0.2" safe-regex "^1.1.0" -regexpp@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-1.1.0.tgz#0e3516dd0b7904f413d2d4193dce4618c3a689ab" - integrity sha512-LOPw8FpgdQF9etWMaAfG/WRthIdXJGYp4mJ2Jgn/2lpkbod9jPn0t9UqN7AxBOKNfzRbYyVfgc7Vk4t/MpnXgw== - -regexpp@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f" - integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw== +regexpp@^3.0.0, regexpp@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2" + integrity 
sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q== regexpu-core@^1.0.0: version "1.0.0" @@ -5494,14 +5843,6 @@ require-main-filename@^2.0.0: resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== -require-uncached@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" - integrity sha1-Tg1W1slmL9MeQwEcS5WqSZVUIdM= - dependencies: - caller-path "^0.1.0" - resolve-from "^1.0.0" - resolve-cwd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" @@ -5517,11 +5858,6 @@ resolve-dir@^1.0.0, resolve-dir@^1.0.1: expand-tilde "^2.0.0" global-modules "^1.0.0" -resolve-from@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" - integrity sha1-Jsv+k10a7uq7Kbw/5a6wHpPUQiY= - resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" @@ -5542,13 +5878,20 @@ resolve-url@^0.2.1: resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= -resolve@^1.10.0, resolve@^1.12.0, resolve@^1.5.0, resolve@^1.8.1: +resolve@^1.10.0, resolve@^1.12.0: version "1.14.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.14.0.tgz#6d14c6f9db9f8002071332b600039abf82053f64" integrity sha512-uviWSi5N67j3t3UKFxej1loCH0VZn5XuqdNxoLShPcYPw6cUZn74K1VRj+9myynRX03bxIBEkwlkob/ujLsJVw== dependencies: path-parse "^1.0.6" +resolve@^1.10.1, resolve@^1.13.1, resolve@^1.17.0: + version "1.17.0" + resolved 
"https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444" + integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w== + dependencies: + path-parse "^1.0.6" + resolve@^1.3.2: version "1.15.1" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.15.1.tgz#27bdcdeffeaf2d6244b95bb0f9f4b4653451f3e8" @@ -5556,14 +5899,6 @@ resolve@^1.3.2: dependencies: path-parse "^1.0.6" -restore-cursor@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" - integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= - dependencies: - onetime "^2.0.0" - signal-exit "^3.0.2" - ret@~0.1.10: version "0.1.15" resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" @@ -5574,20 +5909,27 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== -rimraf@2.6.3, rimraf@~2.6.2: +rimraf@2.6.3: version "2.6.3" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== dependencies: glob "^7.1.3" -rimraf@^2.5.4, rimraf@^2.6.1, rimraf@^2.6.3: +rimraf@^2.5.4, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity 
sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" @@ -5596,13 +5938,6 @@ ripemd160@^2.0.0, ripemd160@^2.0.1: hash-base "^3.0.0" inherits "^2.0.1" -run-async@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.3.0.tgz#0371ab4ae0bdd720d4166d7dfda64ff7a445a6c0" - integrity sha1-A3GrSuC91yDUFm19/aZP96RFpsA= - dependencies: - is-promise "^2.1.0" - run-parallel@^1.1.9: version "1.1.9" resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.1.9.tgz#c9dd3a7cf9f4b2c4b6244e173a6ed866e61dd679" @@ -5620,22 +5955,10 @@ rw@1: resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" integrity sha1-P4Yt+pGrdmsUiF700BEkv9oHT7Q= -rx-lite-aggregates@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz#753b87a89a11c95467c4ac1626c4efc4e05c67be" - integrity sha1-dTuHqJoRyVRnxKwWJsTvxOBcZ74= - dependencies: - rx-lite "*" - -rx-lite@*, rx-lite@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444" - integrity sha1-Cx4Rr4vESDbwSmQH6S2kJGe3lEQ= - -safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" - integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== +safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved 
"https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" @@ -5654,14 +5977,6 @@ safe-regex@^1.1.0: resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -schema-utils@^0.4.5: - version "0.4.7" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.4.7.tgz#ba74f597d2be2ea880131746ee17d0a093c68187" - integrity sha512-v/iwU6wvwGK8HbU9yi3/nhGzP0yGSuhQMzL6ySiec1FSrZZDkhm4noOSWzrNFo/jEc+SJY6jRTwuwbSXJPDUnQ== - dependencies: - ajv "^6.1.0" - ajv-keywords "^3.1.0" - schema-utils@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" @@ -5671,12 +5986,13 @@ schema-utils@^1.0.0: ajv-errors "^1.0.0" ajv-keywords "^3.1.0" -schema-utils@^2.6.0: - version "2.6.4" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.6.4.tgz#a27efbf6e4e78689d91872ee3ccfa57d7bdd0f53" - integrity sha512-VNjcaUxVnEeun6B2fiiUDjXXBtD4ZSH7pdbfIu1pOFwgptDPLMo/z9jr4sUfsjFVPqDCEin/F7IYlq7/E6yDbQ== +schema-utils@^2.6.5, schema-utils@^2.6.6, schema-utils@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== dependencies: - ajv "^6.10.2" + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" ajv-keywords "^3.4.1" seekout@^1.0.1: @@ -5689,15 +6005,34 @@ select@^1.1.2: resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0= 
-"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.6.0: +"semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.5.0, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== -serialize-javascript@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-2.1.2.tgz#ecec53b0e0317bdc95ef76ab7074b7384785fa61" - integrity sha512-rs9OggEUF0V4jUSecXazOYsLfu7OGK2qIn3c7IPBiffz32XniEp/TX9Xmc9LQfK2nQ2QKHvZ2oygKUGU0lG4jQ== +semver@^6.0.0, semver@^6.1.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.2.1: + version "7.3.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.2.tgz#604962b052b81ed0786aae84389ffba70ffd3938" + integrity sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ== + +serialize-javascript@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-3.1.0.tgz#8bf3a9170712664ef2561b44b691eafe399214ea" + integrity sha512-JIJT1DGiWmIKhzRsG91aS6Ze4sFUrYbltlkg2onR5OrnNM02Kl/hnY/T4FN2omvyeBbQmMJv+K4cPOpGzOTFBg== + dependencies: + randombytes "^2.1.0" + +serialize-javascript@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" + integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== + dependencies: + randombytes "^2.1.0" set-blocking@^2.0.0: version "2.0.0" @@ -5734,11 +6069,23 @@ shebang-command@^1.2.0: dependencies: shebang-regex 
"^1.0.0" +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + should-equal@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/should-equal/-/should-equal-2.0.0.tgz#6072cf83047360867e68e98b09d71143d04ee0c3" @@ -5783,7 +6130,12 @@ should@^13.2.1: should-type-adaptors "^1.0.1" should-util "^1.0.0" -signal-exit@^3.0.0, signal-exit@^3.0.2: +signal-exit@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" + integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== + +signal-exit@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= @@ -5798,13 +6150,6 @@ slash@^3.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== -slice-ansi@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-1.0.0.tgz#044f1a49d8842ff307aad6b505ed178bd950134d" - integrity 
sha512-POqxBK6Lb3q6s047D/XsDVNPnF9Dl8JSaqe9h9lURl0OdNqy/ujDrOiIHtsqXMGbWWTIomRzAMaTyawAU//Reg== - dependencies: - is-fullwidth-code-point "^2.0.0" - slice-ansi@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636" @@ -5815,9 +6160,9 @@ slice-ansi@^2.1.0: is-fullwidth-code-point "^2.0.0" slugify@^1.4.0: - version "1.4.2" - resolved "https://registry.yarnpkg.com/slugify/-/slugify-1.4.2.tgz#2c9b2b3321c43110a0058982eea7102b998d5068" - integrity sha512-7UfMG5rtkxfOI5jg/+f4DMQS3ikUqfWnfMvitrhwdTV4pibWXq9mN4RNLHSV3M1lR++x7z+AG7znsiozdBP+aA== + version "1.4.4" + resolved "https://registry.yarnpkg.com/slugify/-/slugify-1.4.4.tgz#2f032ffa52b1e1ca2a27737c1ce47baae3d0883a" + integrity sha512-N2+9NJ8JzfRMh6PQLrBeDEnVDQZSytE/W4BTC4fNNPmO90Uu58uNwSlIJSs+lmPgWsaAF79WLhVPe5tuy7spjw== snapdragon-node@^2.0.1: version "2.1.1" @@ -5849,17 +6194,24 @@ snapdragon@^0.8.1: source-map-resolve "^0.5.0" use "^3.1.0" +sort-keys@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad" + integrity sha1-RBttTTRnmPG05J6JIK37oOVD+a0= + dependencies: + is-plain-obj "^1.0.0" + source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-resolve@^0.5.0: - version "0.5.2" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" - integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== + version "0.5.3" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" + integrity 
sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== dependencies: - atob "^2.1.1" + atob "^2.1.2" decode-uri-component "^0.2.0" resolve-url "^0.2.1" source-map-url "^0.4.0" @@ -5873,9 +6225,9 @@ source-map-support@^0.4.15: source-map "^0.5.6" source-map-support@~0.5.12: - version "0.5.16" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" - integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== + version "0.5.19" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" + integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" @@ -5895,6 +6247,11 @@ source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== +source-map@^0.7.2, source-map@^0.7.3: + version "0.7.3" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" + integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== + spdx-correct@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" @@ -5945,6 +6302,13 @@ ssri@^6.0.1: dependencies: figgy-pudding "^3.5.1" +ssri@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-8.0.0.tgz#79ca74e21f8ceaeddfcb4b90143c458b8d988808" + integrity sha512-aq/pz989nxVYwn16Tsbj1TqFpD5LLrQxHf5zaHuieFV+R0Bbr4y8qUsOA45hXT/N4/9UNXTarBjnjVmjSOVaAA== + dependencies: + minipass "^3.1.1" + 
state-toggle@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/state-toggle/-/state-toggle-1.0.3.tgz#e123b16a88e143139b09c6852221bc9815917dfe" @@ -5995,6 +6359,11 @@ stream-shift@^1.0.0: resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== +strict-uri-encode@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" + integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= + string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" @@ -6004,7 +6373,7 @@ string-width@^1.0.1: is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" -string-width@^2.0.0, string-width@^2.1.0, string-width@^2.1.1: +string-width@^2.0.0, string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== @@ -6030,6 +6399,14 @@ string-width@^4.1.0, string-width@^4.2.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" +string.prototype.trimend@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz#85812a6b847ac002270f5808146064c995fb6913" + integrity sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.5" + string.prototype.trimleft@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" @@ -6046,6 +6423,14 @@ string.prototype.trimright@^2.1.0: 
define-properties "^1.1.3" function-bind "^1.1.1" +string.prototype.trimstart@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz#14af6d9f34b053f7cfc89b72f8f2ee14b9039a54" + integrity sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.5" + string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" @@ -6104,6 +6489,11 @@ strip-bom@^3.0.0: resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= +strip-comments@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" + integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== + strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" @@ -6116,18 +6506,18 @@ strip-indent@^3.0.0: dependencies: min-indent "^1.0.0" -strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= +strip-json-comments@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -style-loader@^0.21.0: - version "0.21.0" - resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.21.0.tgz#68c52e5eb2afc9ca92b6274be277ee59aea3a852" 
- integrity sha512-T+UNsAcl3Yg+BsPKs1vd22Fr8sVT+CJMtzqc6LEw9bbJZb43lm9GoeIfUcDEefBSWC0BhYbcdupV1GtI4DGzxg== +style-loader@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-1.2.1.tgz#c5cbbfbf1170d076cfdd86e0109c5bba114baa1a" + integrity sha512-ByHSTQvHLkWE9Ir5+lGbVOXhxX10fbprhLvdg96wedFZb4NDekDPxVKv5Fwmio+QcMlkkNfuK+5W1peQ5CUhZg== dependencies: - loader-utils "^1.1.0" - schema-utils "^0.4.5" + loader-utils "^2.0.0" + schema-utils "^2.6.6" style-search@^0.1.0: version "0.1.0" @@ -6146,37 +6536,37 @@ stylelint-config-standard@^20.0.0: dependencies: stylelint-config-recommended "^3.0.0" -stylelint@^13.3.1: - version "13.3.1" - resolved "https://registry.yarnpkg.com/stylelint/-/stylelint-13.3.1.tgz#815917e88d7528b24e6d0e6a9b3afb76b50efc26" - integrity sha512-jeeGwU7y/0l2YTL042U2U0W04J9JIO6bRpTM4S8npSzaO5GzBz4VFlVlMucFzZXkSylxppEx9R6p+DiDLJcrWw== +stylelint@^13.6.1: + version "13.6.1" + resolved "https://registry.yarnpkg.com/stylelint/-/stylelint-13.6.1.tgz#cc1d76338116d55e8ff2be94c4a4386c1239b878" + integrity sha512-XyvKyNE7eyrqkuZ85Citd/Uv3ljGiuYHC6UiztTR6sWS9rza8j3UeQv/eGcQS9NZz/imiC4GKdk1EVL3wst5vw== dependencies: - "@stylelint/postcss-css-in-js" "^0.37.0" + "@stylelint/postcss-css-in-js" "^0.37.1" "@stylelint/postcss-markdown" "^0.36.1" - autoprefixer "^9.7.5" + autoprefixer "^9.8.0" balanced-match "^1.0.0" - chalk "^4.0.0" + chalk "^4.1.0" cosmiconfig "^6.0.0" debug "^4.1.1" execall "^2.0.0" file-entry-cache "^5.0.1" - get-stdin "^7.0.0" + get-stdin "^8.0.0" global-modules "^2.0.0" - globby "^11.0.0" + globby "^11.0.1" globjoin "^0.1.4" html-tags "^3.1.0" - ignore "^5.1.4" + ignore "^5.1.8" import-lazy "^4.0.0" imurmurhash "^0.1.4" - known-css-properties "^0.18.0" + known-css-properties "^0.19.0" leven "^3.1.0" lodash "^4.17.15" - log-symbols "^3.0.0" + log-symbols "^4.0.0" mathml-tag-names "^2.1.3" - meow "^6.1.0" + meow "^7.0.1" micromatch "^4.0.2" normalize-selector "^0.2.0" - postcss "^7.0.27" + postcss "^7.0.32" 
postcss-html "^0.36.0" postcss-less "^3.1.4" postcss-media-query-parser "^0.2.3" @@ -6184,10 +6574,10 @@ stylelint@^13.3.1: postcss-resolve-nested-selector "^0.1.1" postcss-safe-parser "^4.0.2" postcss-sass "^0.4.4" - postcss-scss "^2.0.0" + postcss-scss "^2.1.1" postcss-selector-parser "^6.0.2" postcss-syntax "^0.36.2" - postcss-value-parser "^4.0.3" + postcss-value-parser "^4.1.0" resolve-from "^5.0.0" slash "^3.0.0" specificity "^0.4.1" @@ -6197,9 +6587,19 @@ stylelint@^13.3.1: sugarss "^2.0.0" svg-tags "^1.0.0" table "^5.4.6" - v8-compile-cache "^2.1.0" + v8-compile-cache "^2.1.1" write-file-atomic "^3.0.3" +stylis-rule-sheet@^0.0.10: + version "0.0.10" + resolved "https://registry.yarnpkg.com/stylis-rule-sheet/-/stylis-rule-sheet-0.0.10.tgz#44e64a2b076643f4b52e5ff71efc04d8c3c4a430" + integrity sha512-nTbZoaqoBnmK+ptANthb10ZRZOGC+EmTLLUxeYIuHNkEKcmKgXX1XWKkUBT2Ac4es3NybooPe0SmvKdhKJZAuw== + +stylis@^3.5.0: + version "3.5.4" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-3.5.4.tgz#f665f25f5e299cf3d64654ab949a57c768b73fbe" + integrity sha512-8/3pSmthWM7lsPBKv7NXkzn2Uc9W7NotcwGNpJaa3k7WMM1XDCA4MgT5k/8BIexd5ydZdboXtU90XH9Ec4Bv/Q== + sugarss@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sugarss/-/sugarss-2.0.0.tgz#ddd76e0124b297d40bf3cca31c8b22ecb43bc61d" @@ -6207,13 +6607,6 @@ sugarss@^2.0.0: dependencies: postcss "^7.0.2" -supports-color@6.1.0, supports-color@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" - integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== - dependencies: - has-flag "^3.0.0" - supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" @@ -6226,6 +6619,13 @@ supports-color@^5.3.0, supports-color@^5.4.0: dependencies: has-flag "^3.0.0" +supports-color@^6.1.0: + 
version "6.1.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" + integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== + dependencies: + has-flag "^3.0.0" + supports-color@^7.1.0: version "7.1.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1" @@ -6255,19 +6655,7 @@ swagger2openapi@^5.3.4: yaml "^1.8.3" yargs "^12.0.5" -table@4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/table/-/table-4.0.2.tgz#a33447375391e766ad34d3486e6e2aedc84d2e36" - integrity sha512-UUkEAPdSGxtRpiV9ozJ5cMTtYiqz7Ni1OGqLXRCynrvzdtR1p+cfOWe2RJLwvUG8hNanaSRjecIqwOjqeatDsA== - dependencies: - ajv "^5.2.3" - ajv-keywords "^2.1.0" - chalk "^2.1.0" - lodash "^4.17.4" - slice-ansi "1.0.0" - string-width "^2.1.1" - -table@^5.4.6: +table@^5.2.3, table@^5.4.6: version "5.4.6" resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e" integrity sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug== @@ -6282,31 +6670,43 @@ tapable@^1.0.0, tapable@^1.1.3: resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== +tar@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/tar/-/tar-6.0.2.tgz#5df17813468a6264ff14f766886c622b84ae2f39" + integrity sha512-Glo3jkRtPcvpDlAs/0+hozav78yoXKFr+c4wgw62NNMO3oo4AaJdCo21Uu7lcwr55h39W2XD1LMERc64wtbItg== + dependencies: + chownr "^2.0.0" + fs-minipass "^2.0.0" + minipass "^3.0.0" + minizlib "^2.1.0" + mkdirp "^1.0.3" + yallist "^4.0.0" + terser-webpack-plugin@^1.4.3: - version "1.4.3" - resolved 
"https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz#5ecaf2dbdc5fb99745fd06791f46fc9ddb1c9a7c" - integrity sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA== + version "1.4.4" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.4.tgz#2c63544347324baafa9a56baaddf1634c8abfc2f" + integrity sha512-U4mACBHIegmfoEe5fdongHESNJWqsGU+W0S/9+BmYGVQDw1+c2Ow05TpMhxjPK1sRb7cuYq1BPl1e5YHJMTCqA== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" - serialize-javascript "^2.1.2" + serialize-javascript "^3.1.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" worker-farm "^1.7.0" terser@^4.1.2: - version "4.6.10" - resolved "https://registry.yarnpkg.com/terser/-/terser-4.6.10.tgz#90f5bd069ff456ddbc9503b18e52f9c493d3b7c2" - integrity sha512-qbF/3UOo11Hggsbsqm2hPa6+L4w7bkr+09FNseEe8xrcVD3APGLFqE+Oz1ZKAxjYnFsj80rLOfgAtJ0LNJjtTA== + version "4.8.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-4.8.0.tgz#63056343d7c70bb29f3af665865a46fe03a0df17" + integrity sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw== dependencies: commander "^2.20.0" source-map "~0.6.1" source-map-support "~0.5.12" -text-table@~0.2.0: +text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= @@ -6319,11 +6719,6 @@ through2@^2.0.0: readable-stream "~2.3.6" xtend "~4.0.1" -through@^2.3.6: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= - timers-browserify@^2.0.4: version "2.0.11" resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" @@ -6336,13 +6731,6 @@ 
tiny-emitter@^2.0.0: resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423" integrity sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q== -tmp@^0.0.33: - version "0.0.33" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" - integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== - dependencies: - os-tmpdir "~1.0.2" - to-arraybuffer@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" @@ -6390,6 +6778,13 @@ to-regex@^3.0.1, to-regex@^3.0.2: regex-not "^1.0.2" safe-regex "^1.1.0" +touch@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/touch/-/touch-2.0.2.tgz#ca0b2a3ae3211246a61b16ba9e6cbf1596287164" + integrity sha512-qjNtvsFXTRq7IuMLweVgFxmEuQ6gLbRs2jQxL80TtZ31dEKWYIxRXquij6w6VimyDek5hD3PytljHmEtAs2u0A== + dependencies: + nopt "~1.0.10" + trim-newlines@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.0.tgz#79726304a6a898aa8373427298d54c2ee8b1cb30" @@ -6415,27 +6810,37 @@ trough@^1.0.0: resolved "https://registry.yarnpkg.com/trough/-/trough-1.0.5.tgz#b8b639cefad7d0bb2abd37d433ff8293efa5f406" integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA== -tslib@^1.11.1: +tsconfig-paths@^3.9.0: + version "3.9.0" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz#098547a6c4448807e8fcb8eae081064ee9a3c90b" + integrity sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.0" + strip-bom "^3.0.0" + +tslib@^1.11.1, tslib@^1.9.0: version "1.13.0" resolved 
"https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043" integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q== -tslib@^1.9.0: - version "1.10.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" - integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== - tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== dependencies: - prelude-ls "~1.1.2" + prelude-ls "^1.2.1" + +type-fest@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.13.1.tgz#0172cb5bce80b0bd542ea348db50c7e21834d934" + integrity sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg== type-fest@^0.6.0: version "0.6.0" @@ -6589,19 +6994,19 @@ urix@^0.1.0: resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= -url-loader@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-1.1.2.tgz#b971d191b83af693c5e3fea4064be9e1f2d7f8d8" - integrity sha512-dXHkKmw8FhPqu8asTc1puBfe3TehOCo2+RmOOev5suNCIYBcT626kxiWg1NBVkwc4rO8BGa7gP70W7VXuqHrjg== +url-loader@4.1.0: + version "4.1.0" + resolved 
"https://registry.yarnpkg.com/url-loader/-/url-loader-4.1.0.tgz#c7d6b0d6b0fccd51ab3ffc58a78d32b8d89a7be2" + integrity sha512-IzgAAIC8wRrg6NYkFIJY09vtktQcsvU8V6HhtQj9PTefbYImzLB1hufqo4m+RyM5N3mLx5BqJKccgxJS+W3kqw== dependencies: - loader-utils "^1.1.0" - mime "^2.0.3" - schema-utils "^1.0.0" + loader-utils "^2.0.0" + mime-types "^2.1.26" + schema-utils "^2.6.5" -url-search-params-polyfill@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/url-search-params-polyfill/-/url-search-params-polyfill-6.0.0.tgz#f5e5fc230d56125f5b0ba67d9cbcd6555fa347e3" - integrity sha512-69Bl5s3SiEgcHe8SMpzLGOyag27BQeTeSaP/CfVHkKc/VdUHtNjaP2PnhshFVC021221ItueOzuMMGofZ/HDmQ== +url-search-params-polyfill@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/url-search-params-polyfill/-/url-search-params-polyfill-8.1.0.tgz#5c15b69687165bfd4f6c7d8a161d70d85385885b" + integrity sha512-MRG3vzXyG20BJ2fox50/9ZRoe+2h3RM7DIudVD2u/GY9MtayO1Dkrna76IUOak+uoUPVWbyR0pHCzxctP/eDYQ== url-template@^2.0.8: version "2.0.8" @@ -6640,20 +7045,10 @@ util@^0.11.0: dependencies: inherits "2.0.3" -uuid@^3.3.2: - version "3.4.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" - integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== - -v8-compile-cache@2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" - integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== - -v8-compile-cache@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz#e14de37b31a6d194f5690d67efc4e7f6fc6ab30e" - integrity sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g== +v8-compile-cache@^2.0.3, v8-compile-cache@^2.1.1: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz#54bc3cdd43317bca91e35dcaf305b1a7237de745" + integrity sha512-8OQ9CL+VWyt3JStj7HX7/ciTL2V3Rl1Wf5OL+SNTm0yK1KvtReVulksyeRnCANHHuUxHlQig+JJDlUhBt1NQDQ== validate-npm-package-license@^3.0.1: version "3.0.4" @@ -6692,39 +7087,40 @@ vm-browserify@^1.0.1: resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== -watchpack@^1.6.0: - version "1.6.1" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.1.tgz#280da0a8718592174010c078c7585a74cd8cd0e2" - integrity sha512-+IF9hfUFOrYOOaKyfaI7h7dquUIOgyEMoQMLA7OP5FxegKA2+XdXThAZ9TU2kucfhDH7rfMHs1oPYziVGWRnZA== +watchpack-chokidar2@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/watchpack-chokidar2/-/watchpack-chokidar2-2.0.0.tgz#9948a1866cbbd6cb824dea13a7ed691f6c8ddff0" + integrity sha512-9TyfOyN/zLUbA288wZ8IsMZ+6cbzvsNyEzSBp6e/zkifi6xxbl8SmQ/CxQq32k8NNqrdVEVUVSEf56L4rQ/ZxA== dependencies: chokidar "^2.1.8" + +watchpack@^1.6.1: + version "1.7.2" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.7.2.tgz#c02e4d4d49913c3e7e122c3325365af9d331e9aa" + integrity sha512-ymVbbQP40MFTp+cNMvpyBpBtygHnPzPkHqoIwRRj/0B8KhqQwV8LaKjtbaxF2lK4vl8zN9wCxS46IFCU5K4W0g== + dependencies: graceful-fs "^4.1.2" neo-async "^2.5.0" + optionalDependencies: + chokidar "^3.4.0" + watchpack-chokidar2 "^2.0.0" webpack-cli@^3.1.0: - version "3.3.10" - resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.10.tgz#17b279267e9b4fb549023fae170da8e6e766da13" - integrity sha512-u1dgND9+MXaEt74sJR4PR7qkPxXUSQ0RXYq8x1L6Jg1MYVEmGPrH6Ah6C4arD4r0J1P5HKjRqpab36k0eIzPqg== - dependencies: - chalk "2.4.2" - cross-spawn "6.0.5" - enhanced-resolve "4.1.0" - findup-sync "3.0.0" - global-modules "2.0.0" - import-local "2.0.0" - interpret "1.2.0" - loader-utils "1.2.3" - 
supports-color "6.1.0" - v8-compile-cache "2.0.3" - yargs "13.2.4" - -webpack-log@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" - integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== + version "3.3.12" + resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.12.tgz#94e9ada081453cd0aa609c99e500012fd3ad2d4a" + integrity sha512-NVWBaz9k839ZH/sinurM+HcDvJOTXwSjYp1ku+5XKeOC03z8v5QitnK/x+lAxGXFyhdayoIf/GOpv85z3/xPag== dependencies: - ansi-colors "^3.0.0" - uuid "^3.3.2" + chalk "^2.4.2" + cross-spawn "^6.0.5" + enhanced-resolve "^4.1.1" + findup-sync "^3.0.0" + global-modules "^2.0.0" + import-local "^2.0.0" + interpret "^1.4.0" + loader-utils "^1.4.0" + supports-color "^6.1.0" + v8-compile-cache "^2.1.1" + yargs "^13.3.2" webpack-manifest-plugin@^2.2.0: version "2.2.0" @@ -6736,7 +7132,7 @@ webpack-manifest-plugin@^2.2.0: object.entries "^1.1.0" tapable "^1.0.0" -webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: +webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1, webpack-sources@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== @@ -6745,15 +7141,15 @@ webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: source-map "~0.6.1" webpack@^4.16.3: - version "4.42.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.42.1.tgz#ae707baf091f5ca3ef9c38b884287cfe8f1983ef" - integrity sha512-SGfYMigqEfdGchGhFFJ9KyRpQKnipvEvjc1TwrXEPCM6H5Wywu10ka8o3KGrMzSMxMQKt8aCHUFh5DaQ9UmyRg== + version "4.43.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.43.0.tgz#c48547b11d563224c561dad1172c8aa0b8a678e6" + integrity 
sha512-GW1LjnPipFW2Y78OOab8NJlCflB7EFskMih2AHdvjbpKMeDJqEgSx24cXXXiPS65+WSwVyxtDsJH6jGX2czy+g== dependencies: "@webassemblyjs/ast" "1.9.0" "@webassemblyjs/helper-module-context" "1.9.0" "@webassemblyjs/wasm-edit" "1.9.0" "@webassemblyjs/wasm-parser" "1.9.0" - acorn "^6.2.1" + acorn "^6.4.1" ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" @@ -6770,7 +7166,7 @@ webpack@^4.16.3: schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.3" - watchpack "^1.6.0" + watchpack "^1.6.1" webpack-sources "^1.4.1" which-module@^2.0.0: @@ -6785,15 +7181,22 @@ which@^1.2.14, which@^1.2.9, which@^1.3.1: dependencies: isexe "^2.0.0" -word-wrap@~1.2.3: +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== -wordwrap@~0.0.2: - version "0.0.3" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" - integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= +wordwrap@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= worker-farm@^1.7.0: version "1.7.0" @@ -6850,13 +7253,6 @@ write@1.0.3: dependencies: mkdirp "^0.5.1" -write@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757" - integrity sha1-X8A4KOJkzqP+kUVUdvejxWbLB1c= - dependencies: - mkdirp "^0.5.1" - xtend@^4.0.0, xtend@^4.0.1, xtend@~4.0.1: version "4.0.2" resolved 
"https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" @@ -6867,16 +7263,16 @@ xtend@^4.0.0, xtend@^4.0.1, xtend@~4.0.1: resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= - yallist@^3.0.2: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + yaml@^1.7.2: version "1.8.3" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.8.3.tgz#2f420fca58b68ce3a332d0ca64be1d191dd3f87a" @@ -6897,39 +7293,22 @@ yargs-parser@^11.1.1: camelcase "^5.0.0" decamelize "^1.2.0" -yargs-parser@^13.1.0: - version "13.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" - integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== +yargs-parser@^13.1.2: + version "13.1.2" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" + integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" -yargs-parser@^18.1.1: - version "18.1.2" - resolved 
"https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.2.tgz#2f482bea2136dbde0861683abea7756d30b504f1" - integrity sha512-hlIPNR3IzC1YuL1c2UwwDKpXlNFBqD1Fswwh1khz5+d8Cq/8yc/Mn0i+rQXduu8hcrFKvO7Eryk+09NecTQAAQ== +yargs-parser@^18.1.2, yargs-parser@^18.1.3: + version "18.1.3" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" + integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" -yargs@13.2.4: - version "13.2.4" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" - integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== - dependencies: - cliui "^5.0.0" - find-up "^3.0.0" - get-caller-file "^2.0.1" - os-locale "^3.1.0" - require-directory "^2.1.1" - require-main-filename "^2.0.0" - set-blocking "^2.0.0" - string-width "^3.0.0" - which-module "^2.0.0" - y18n "^4.0.0" - yargs-parser "^13.1.0" - yargs@^12.0.5: version "12.0.5" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" @@ -6948,10 +7327,26 @@ yargs@^12.0.5: y18n "^3.2.1 || ^4.0.0" yargs-parser "^11.1.1" +yargs@^13.3.2: + version "13.3.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd" + integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw== + dependencies: + cliui "^5.0.0" + find-up "^3.0.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^3.0.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^13.1.2" + yargs@^15.3.1: - version "15.3.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b" - integrity 
sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA== + version "15.4.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== dependencies: cliui "^6.0.0" decamelize "^1.2.0" @@ -6963,4 +7358,4 @@ yargs@^15.3.1: string-width "^4.2.0" which-module "^2.0.0" y18n "^4.0.0" - yargs-parser "^18.1.1" + yargs-parser "^18.1.2" diff --git a/breeze b/breeze index 5807144a15b20..c122b17bb80ff 100755 --- a/breeze +++ b/breeze @@ -97,6 +97,9 @@ function setup_default_breeze_variables() { # By default we mount local Airflow sources MOUNT_LOCAL_SOURCES="true" + # By default we mount files folder + MOUNT_FILES="true" + # Holds last sub-command used - this is used by --help flag to print help for the command entered LAST_SUBCOMMAND="" @@ -492,6 +495,7 @@ function prepare_command_files() { MAIN_PROD_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/base.yml BACKEND_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml LOCAL_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/local.yml + FILES_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/files.yml LOCAL_PROD_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/local-prod.yml REMOVE_SOURCES_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml @@ -504,6 +508,11 @@ function prepare_command_files() { COMPOSE_PROD_FILE=${COMPOSE_PROD_FILE}:${LOCAL_PROD_DOCKER_COMPOSE_FILE} fi + if [[ "${MOUNT_FILES}" != "false" ]]; then + COMPOSE_CI_FILE=${COMPOSE_CI_FILE}:${FILES_DOCKER_COMPOSE_FILE} + COMPOSE_PROD_FILE=${COMPOSE_PROD_FILE}:${FILES_DOCKER_COMPOSE_FILE} + fi + if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then COMPOSE_CI_FILE=${COMPOSE_CI_FILE}:${FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE} 
COMPOSE_PROD_FILE=${COMPOSE_PROD_FILE}:${FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE} @@ -2125,8 +2134,10 @@ function run_breeze_command { enter_breeze) if [[ ${PRODUCTION_IMAGE} == "true" ]]; then "${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE}" run --service-ports --rm airflow "${@}" + "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh" else "${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}" run --service-ports --rm airflow "${@}" + "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh" "${SCRIPTS_CI_DIR}/tools/ci_clear_tmp.sh" fi ;; diff --git a/chart/templates/NOTES.txt b/chart/templates/NOTES.txt index c4e65cf10b2c3..7eb9d84a1218f 100644 --- a/chart/templates/NOTES.txt +++ b/chart/templates/NOTES.txt @@ -22,8 +22,8 @@ Your release is named {{ .Release.Name }}. You can now access your dashboard(s) by executing the following command(s) and visiting the corresponding port at localhost in your browser: -Airflow dashboard: kubectl port-forward svc/{{ .Release.Name }}-webserver {{ .Values.ports.airflowUI }}:{{ .Values.ports.airflowUI }} --namespace airflow +Airflow dashboard: kubectl port-forward svc/{{ .Release.Name }}-webserver {{ .Values.ports.airflowUI }}:{{ .Values.ports.airflowUI }} --namespace {{ .Release.Namespace }} {{- if eq .Values.executor "CeleryExecutor"}} -Flower dashboard: kubectl port-forward svc/{{ .Release.Name }}-flower {{ .Values.ports.flowerUI }}:{{ .Values.ports.flowerUI }} --namespace airflow +Flower dashboard: kubectl port-forward svc/{{ .Release.Name }}-flower {{ .Values.ports.flowerUI }}:{{ .Values.ports.flowerUI }} --namespace {{ .Release.Namespace }} {{- end }} diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000000000..126bb150ca895 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,70 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +codecov: + require_ci_to_pass: true + notify: + wait_for_ci: false + +coverage: + precision: 2 + round: down + range: "85...100" + status: + project: + default: + # basic + target: auto + threshold: 0% + base: auto + paths: + - "airflow" + # advanced + branches: + - master + - v1-10-stable + - v1-10-test + if_not_found: success + if_ci_failed: error + informational: true + only_pulls: false + patch: + default: + # basic + target: auto + threshold: 0% + base: auto + # advanced + branches: + - master + - v1-10-stable + - v1-10-test + if_no_uploads: error + if_not_found: success + if_ci_failed: error + only_pulls: false + paths: + - "airflow" +parsers: + gcov: + branch_detection: + conditional: yes + loop: yes + method: no + macro: no + +comment: false diff --git a/dev/BACKPORT_PACKAGES.md b/dev/BACKPORT_PACKAGES.md index 7ba77a6ea6cd8..a5ed4d2fb3aba 100644 --- a/dev/BACKPORT_PACKAGES.md +++ b/dev/BACKPORT_PACKAGES.md @@ -131,7 +131,7 @@ The script generates two types of files: Note that our CI system builds the release notes for backport packages automatically with every build and current date - this way you might be sure the automated generation of the release notes continues to -work. You can also preview the generated readme files (by downloading artifacts uploaded to file.io). +work. You can also preview the generated readme files (by downloading artifacts from Github Actions). 
The script does not modify the README and CHANGES files if there is no change in the repo for that provider. # Preparing backport packages diff --git a/docs/autoapi_templates/index.rst b/docs/autoapi_templates/index.rst index a04c53edc1260..a1263861c25e5 100644 --- a/docs/autoapi_templates/index.rst +++ b/docs/autoapi_templates/index.rst @@ -114,8 +114,6 @@ All operators are in the following packages: airflow/providers/docker/operators/index - airflow/providers/email/operators/index - airflow/providers/exasol/operators/index airflow/providers/ftp/sensors/index diff --git a/docs/build b/docs/build index d8a93da286acd..257c7678eb2f5 100755 --- a/docs/build +++ b/docs/build @@ -355,7 +355,6 @@ MISSING_GOOGLLE_DOC_GUIDES = { 'bigquery_to_mysql', 'cassandra_to_gcs', 'dataflow', - 'datastore', 'dlp', 'gcs_to_bigquery', 'mssql_to_gcs', diff --git a/docs/concepts.rst b/docs/concepts.rst index 8f5644bc14f03..b75cb4105a6c4 100644 --- a/docs/concepts.rst +++ b/docs/concepts.rst @@ -380,7 +380,7 @@ Airflow provides operators for many common tasks, including: - :class:`~airflow.operators.bash.BashOperator` - executes a bash command - :class:`~airflow.operators.python.PythonOperator` - calls an arbitrary Python function -- :class:`~airflow.providers.email.operators.email.EmailOperator` - sends an email +- :class:`~airflow.operators.email.EmailOperator` - sends an email - :class:`~airflow.providers.http.operators.http.SimpleHttpOperator` - sends an HTTP request - :class:`~airflow.providers.mysql.operators.mysql.MySqlOperator`, :class:`~airflow.providers.sqlite.operators.sqlite.SqliteOperator`, @@ -675,11 +675,6 @@ managed in the UI (``Menu -> Admin -> Connections``). A ``conn_id`` is defined password / schema information attached to it. Airflow pipelines retrieve centrally-managed connections information by specifying the relevant ``conn_id``. -You may add more than one connection with the same ``conn_id``. 
When there is more than one connection -with the same ``conn_id``, the :py:meth:`~airflow.hooks.base_hook.BaseHook.get_connection` method on -:py:class:`~airflow.hooks.base_hook.BaseHook` will choose one connection randomly. This can be be used to -provide basic load balancing and fault tolerance, when used in conjunction with retries. - Airflow also provides a mechanism to store connections outside the database, e.g. in :ref:`environment variables `. Additional sources may be enabled, e.g. :ref:`AWS SSM Parameter Store `, or you may :ref:`roll your own secrets backend `. diff --git a/docs/executor/index.rst b/docs/executor/index.rst index 6aa0544af450f..57b816ff5f2bb 100644 --- a/docs/executor/index.rst +++ b/docs/executor/index.rst @@ -28,6 +28,14 @@ full path to the module e.g. ``my_acme_company.executors.MyCustomExecutor``. .. note:: For more information on setting the configuration, see :doc:`../howto/set-config`. +If you want to check which executor is currently set, you can use ``airflow config get-value core executor`` command as in +the example below. + +.. code-block:: bash + + $ airflow config get-value core executor + SequentialExecutor + .. toctree:: :maxdepth: 1 diff --git a/docs/howto/connection/index.rst b/docs/howto/connection/index.rst index d88e020609042..c2d9509ac2859 100644 --- a/docs/howto/connection/index.rst +++ b/docs/howto/connection/index.rst @@ -88,6 +88,93 @@ Alternatively you may specify each parameter individually: --conn-schema 'schema' \ ... +.. _connection/export: + +Exporting Connections from the CLI +---------------------------------- + +You may export connections from the database using the CLI. The supported formats are ``json``, ``yaml`` and ``env``. + +You may mention the target file as the parameter: + +.. code-block:: bash + + airflow connections export connections.json + +Alternatively you may specify ``format`` parameter for overriding the format: + +.. 
code-block:: bash + + airflow connections export /tmp/connections --format yaml + +You may also specify ``-`` for STDOUT: + +.. code-block:: bash + + airflow connections export - + +The JSON format contains an object where the key contains the connection ID and the value contains the definition of the connection. In this format, the connection is defined as a JSON object. The following is a sample JSON file. + +.. code-block:: json + + { + "CONN_A": { + "conn_type": "mysql", + "host": "mysql", + "login": "root", + "password": "plainpassword", + "schema": "airflow", + "port": null, + "extra": null, + "is_encrypted": false, + "is_extra_encrypted": false + }, + "CONN_B": { + "conn_type": "druid", + "host": "druid-broker", + "login": null, + "password": null, + "schema": null, + "port": 8082, + "extra": "{\"endpoint\": \"druid/v2/sql\"}", + "is_encrypted": false, + "is_extra_encrypted": false + } + } + +The YAML file structure is similar to that of a JSON. The key-value pair of connection ID and the definitions of one or more connections. In this format, the connection is defined as a YAML object. The following is a sample YAML file. + +.. code-block:: yaml + + CONN_A: + conn_type: mysql + extra: + host: mysql + is_encrypted: false + is_extra_encrypted: false + login: root + password: plainpassword + port: + schema: airflow + + CONN_B: + conn_type: druid + extra: '{"endpoint": "druid/v2/sql"}' + host: druid-broker + is_encrypted: false + is_extra_encrypted: false + login: + password: + port: 8082 + schema: + +You may also export connections in ``.env`` format. The key is the connection ID, and the value describes the connection using the URI. The following is a sample ENV file. + +.. code-block:: text + + CONN_A=mysql://root:plainpassword@mysql/airflow + CONN_B=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql + .. 
_environment_variables_secrets_backend: Storing a Connection in Environment Variables @@ -127,7 +214,7 @@ Alternative secrets backend --------------------------- In addition to retrieving connections from environment variables or the metastore database, you can enable -an alternative secrets backend to retrieve connections. For more details see :doc:`../use-alternative-secrets-backend` +an alternative secrets backend to retrieve connections. For more details see :doc:`../secrets-backend/index` Connection URI format --------------------- diff --git a/docs/howto/custom-operator.rst b/docs/howto/custom-operator.rst index 153f7510f81e5..94c82402b6d79 100644 --- a/docs/howto/custom-operator.rst +++ b/docs/howto/custom-operator.rst @@ -47,8 +47,8 @@ Let's implement an example ``HelloOperator`` in a new file ``hello_operator.py`` def __init__( self, name: str, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.name = name def execute(self, context): @@ -104,8 +104,8 @@ Let's extend our previous example to fetch name from MySQL: name: str, mysql_conn_id: str, database: str, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.name = name self.mysql_conn_id = mysql_conn_id self.database = database @@ -157,8 +157,8 @@ the operator. def __init__( self, name: str, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.name = name def execute(self, context): @@ -193,8 +193,8 @@ with actual value. 
Note that Jinja substitutes the operator attributes and not t def __init__( self, name: str, - *args, **kwargs) -> None: - super().__init__(*args, **kwargs) + **kwargs) -> None: + super().__init__(**kwargs) self.guest_name = name In the example, the ``template_fields`` should be ``['guest_name']`` and not ``['name']`` diff --git a/docs/howto/define_extra_link.rst b/docs/howto/define_extra_link.rst index 47bb5ed3b7d0b..bded978cf7e01 100644 --- a/docs/howto/define_extra_link.rst +++ b/docs/howto/define_extra_link.rst @@ -48,8 +48,8 @@ The following code shows how to add extra links to an operator: ) @apply_defaults - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, **kwargs): + super().__init__(**kwargs) def execute(self, context): self.log.info("Hello World!") diff --git a/docs/howto/email-config.rst b/docs/howto/email-config.rst index 686fefa9e01f4..4127b39ffc71d 100644 --- a/docs/howto/email-config.rst +++ b/docs/howto/email-config.rst @@ -16,7 +16,7 @@ under the License. Email Configuration -------------------- +=================== You can configure the email that is being sent in your ``airflow.cfg`` by setting a ``subject_template`` and/or a ``html_content_template`` @@ -31,6 +31,14 @@ in the ``email`` section. subject_template = /path/to/my_subject_template_file html_content_template = /path/to/my_html_content_template_file +If you want to check which email backend is currently set, you can use ``airflow config get-value email email_backend`` command as in +the example below. + +.. code-block:: bash + + $ airflow config get-value email email_backend + airflow.utils.email.send_email_smtp + To access the task's information you use `Jinja Templating `_ in your template files. 
For example a ``html_content_template`` file could look like this: diff --git a/docs/howto/index.rst b/docs/howto/index.rst index 837f0f3a632f0..a47dd20477c16 100644 --- a/docs/howto/index.rst +++ b/docs/howto/index.rst @@ -46,4 +46,4 @@ configuring an Airflow environment. define_extra_link tracking-user-activity email-config - use-alternative-secrets-backend + secrets-backend/index diff --git a/docs/howto/operator/google/cloud/datastore.rst b/docs/howto/operator/google/cloud/datastore.rst new file mode 100644 index 0000000000000..a73f4265eac2d --- /dev/null +++ b/docs/howto/operator/google/cloud/datastore.rst @@ -0,0 +1,173 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Google Cloud Datastore Operators +================================ + +Firestore in Datastore mode is a NoSQL document database built for automatic scaling, +high performance, and ease of application development. + +For more information about the service visit +`Datastore product documentation `__ + +.. contents:: + :depth: 1 + :local: + +Prerequisite Tasks +------------------ + +.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst + + +.. 
_howto/operator:CloudDatastoreExportEntitiesOperator: + +Export Entities +--------------- + +To export entities from Google Cloud Datastore to Cloud Storage use +:class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator` + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 4 + :start-after: [START how_to_export_task] + :end-before: [END how_to_export_task] + +.. _howto/operator:CloudDatastoreImportEntitiesOperator: + +Import Entities +--------------- + +To import entities from Cloud Storage to Google Cloud Datastore use +:class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator` + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 4 + :start-after: [START how_to_import_task] + :end-before: [END how_to_import_task] + +.. _howto/operator:CloudDatastoreAllocateIdsOperator: + +Allocate Ids +------------ + +To allocate IDs for incomplete keys use +:class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreAllocateIdsOperator` + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 4 + :start-after: [START how_to_allocate_ids] + :end-before: [END how_to_allocate_ids] + +An example of a partial keys required by the operator: + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 0 + :start-after: [START how_to_keys_def] + :end-before: [END how_to_keys_def] + +.. _howto/operator:CloudDatastoreBeginTransactionOperator: + +Begin transaction +----------------- + +To begin a new transaction use +:class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreBeginTransactionOperator` + +.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 4 + :start-after: [START how_to_begin_transaction] + :end-before: [END how_to_begin_transaction] + +An example of a transaction options required by the operator: + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 0 + :start-after: [START how_to_transaction_def] + :end-before: [END how_to_transaction_def] + +.. _howto/operator:CloudDatastoreCommitOperator: + +Commit transaction +------------------ + +To commit a transaction, optionally creating, deleting or modifying some entities +use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCommitOperator` + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 4 + :start-after: [START how_to_commit_task] + :end-before: [END how_to_commit_task] + +An example of a commit information required by the operator: + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 0 + :start-after: [START how_to_commit_def] + :end-before: [END how_to_commit_def] + +.. _howto/operator:CloudDatastoreRunQueryOperator: + +Run query +--------- + +To run a query for entities use +:class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRunQueryOperator` + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 4 + :start-after: [START how_to_run_query] + :end-before: [END how_to_run_query] + +An example of a query required by the operator: + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 0 + :start-after: [START how_to_query_def] + :end-before: [END how_to_query_def] + +.. 
_howto/operator:CloudDatastoreRollbackOperator: + +Roll back transaction +--------------------- + +To roll back a transaction +use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRollbackOperator` + +.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py + :language: python + :dedent: 4 + :start-after: [START how_to_rollback_transaction] + :end-before: [END how_to_rollback_transaction] + + +References +^^^^^^^^^^ +For further information, take a look at: + +* `Datastore API documentation `__ +* `Product documentation `__ diff --git a/docs/howto/operator/python.rst b/docs/howto/operator/python.rst index 46e7d4ce25dc3..8b6d726bb8494 100644 --- a/docs/howto/operator/python.rst +++ b/docs/howto/operator/python.rst @@ -50,3 +50,34 @@ argument. The ``templates_dict`` argument is templated, so each value in the dictionary is evaluated as a :ref:`Jinja template `. + + + +.. _howto/operator:PythonVirtualenvOperator: + +PythonVirtualenvOperator +======================== + +Use the :class:`~airflow.operators.python.PythonVirtualenvOperator` to execute +Python callables inside a new Python virtual environment. + +.. exampleinclude:: ../../../airflow/example_dags/example_python_operator.py + :language: python + :start-after: [START howto_operator_python_venv] + :end-before: [END howto_operator_python_venv] + +Passing in arguments +^^^^^^^^^^^^^^^^^^^^ + +You can use the ``op_args`` and ``op_kwargs`` arguments the same way you use it in the PythonOperator. +Unfortunately we currently do not support to serialize ``var`` and ``ti`` / ``task_instance`` due to incompatibilities +with the underlying library. For airflow context variables make sure that you either have access to Airflow through +setting ``system_site_packages`` to ``True`` or add ``apache-airflow`` to the ``requirements`` argument. +Otherwise you won't have access to the most context variables of Airflow in ``op_kwargs``. 
+If you want the context related to datetime objects like ``execution_date`` you can add ``pendulum`` and +``lazy_object_proxy``. + +Templating +^^^^^^^^^^ + +You can use jinja Templating the same way you use it in PythonOperator. diff --git a/docs/howto/secrets-backend/aws-secrets-manaager-backend.rst b/docs/howto/secrets-backend/aws-secrets-manaager-backend.rst new file mode 100644 index 0000000000000..d52c5e445b5b6 --- /dev/null +++ b/docs/howto/secrets-backend/aws-secrets-manaager-backend.rst @@ -0,0 +1,73 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +AWS Secrets Manager Backend +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To enable Secrets Manager, specify :py:class:`~airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend` +as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. + +Here is a sample configuration: + +.. code-block:: ini + + [secrets] + backend = airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend + backend_kwargs = {"connections_prefix": "airflow/connections", "variables_prefix": "airflow/variables", "profile_name": "default"} + +To authenticate you can either supply a profile name to reference aws profile, e.g. 
defined in ``~/.aws/config`` or set +environment variables like ``AWS_ACCESS_KEY_ID``, ``AWS_SECRET_ACCESS_KEY``. + + +Storing and Retrieving Connections +"""""""""""""""""""""""""""""""""" + +If you have set ``connections_prefix`` as ``airflow/connections``, then for a connection id of ``smtp_default``, +you would want to store your connection at ``airflow/connections/smtp_default``. + +Example: + +.. code-block:: bash + + aws secretsmanager put-secret-value \ + --secret-id airflow/connections/smtp_default \ + --secret-string "smtps://user:host@relay.example.com:465" + +Verify that you can get the secret: + +.. code-block:: console + + ❯ aws secretsmanager get-secret-value --secret-id airflow/connections/smtp_default + { + "ARN": "arn:aws:secretsmanager:us-east-2:314524341751:secret:airflow/connections/smtp_default-7meuul", + "Name": "airflow/connections/smtp_default", + "VersionId": "34f90eff-ea21-455a-9c8f-5ee74b21be672", + "SecretString": "smtps://user:host@relay.example.com:465", + "VersionStages": [ + "AWSCURRENT" + ], + "CreatedDate": "2020-04-08T02:10:35.132000+01:00" + } + +The value of the secret must be the :ref:`connection URI representation ` +of the connection object. + +Storing and Retrieving Variables +"""""""""""""""""""""""""""""""" + +If you have set ``variables_prefix`` as ``airflow/variables``, then for an Variable key of ``hello``, +you would want to store your Variable at ``airflow/variables/hello``. diff --git a/docs/howto/secrets-backend/aws-ssm-parameter-store-secrets-backend.rst b/docs/howto/secrets-backend/aws-ssm-parameter-store-secrets-backend.rst new file mode 100644 index 0000000000000..4d9980084eb2a --- /dev/null +++ b/docs/howto/secrets-backend/aws-ssm-parameter-store-secrets-backend.rst @@ -0,0 +1,51 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. _ssm_parameter_store_secrets: + +AWS SSM Parameter Store Secrets Backend +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To enable SSM parameter store, specify :py:class:`~airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend` +as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. + +Here is a sample configuration: + +.. code-block:: ini + + [secrets] + backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend + backend_kwargs = {"connections_prefix": "/airflow/connections", "variables_prefix": "/airflow/variables", "profile_name": "default"} + +Storing and Retrieving Connections +"""""""""""""""""""""""""""""""""" + +If you have set ``connections_prefix`` as ``/airflow/connections``, then for a connection id of ``smtp_default``, +you would want to store your connection at ``/airflow/connections/smtp_default``. + +Optionally you can supply a profile name to reference aws profile, e.g. defined in ``~/.aws/config``. + +The value of the SSM parameter must be the :ref:`connection URI representation ` +of the connection object. + +Storing and Retrieving Variables +"""""""""""""""""""""""""""""""" + +If you have set ``variables_prefix`` as ``/airflow/variables``, then for an Variable key of ``hello``, +you would want to store your Variable at ``/airflow/variables/hello``. 
+ +Optionally you can supply a profile name to reference aws profile, e.g. defined in ``~/.aws/config``. diff --git a/docs/howto/secrets-backend/google-cloud-secret-manager-backend.rst b/docs/howto/secrets-backend/google-cloud-secret-manager-backend.rst new file mode 100644 index 0000000000000..b7e58152c05e4 --- /dev/null +++ b/docs/howto/secrets-backend/google-cloud-secret-manager-backend.rst @@ -0,0 +1,134 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. _google_cloud_secret_manager_backend: + +Google Cloud Secret Manager Backend +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This topic describes how to configure Airflow to use `Secret Manager `__ as +a secret backend and how to manage secrets. + +Before you begin +"""""""""""""""" + +`Configure Secret Manager and your local environment `__, once per project. + +Enabling the secret backend +""""""""""""""""""""""""""" + +To enable the secret backend for Google Cloud Secrets Manager to retrieve connection/variables, +specify :py:class:`~airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend` +as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. + +Here is a sample configuration if you want to use it: + +.. 
code-block:: ini + + [secrets] + backend = airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend + +You can also set this with environment variables. + +.. code-block:: bash + + export AIRFLOW__SECRETS__BACKEND=airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend + +You can verify the correct setting of the configuration options with the ``airflow config get-value`` command. + +.. code-block:: bash + + $ airflow config get-value secrets backend + airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend + +Backend parameters +"""""""""""""""""" + +The next step is to configure backend parameters using the ``backend_kwargs`` options. You can pass +the following parameters: + +* ``connections_prefix``: Specifies the prefix of the secret to read to get Connections. Default: ``"airflow-connections"`` +* ``variables_prefix``: Specifies the prefix of the secret to read to get Variables. Default: ``"airflow-variables"`` +* ``gcp_key_path``: Path to GCP Credential JSON file. +* ``gcp_keyfile_dict``: Dictionary of keyfile parameters. +* ``gcp_scopes``: Comma-separated string containing GCP scopes. +* ``sep``: Separator used to concatenate connections_prefix and conn_id. Default: "-" +* ``project_id``: Project ID to read the secrets from. If not passed, the project ID from credentials will be used. + +All options should be passed as a JSON dictionary. + +For example, if you want to set parameter ``connections_prefix`` to ``"airflow-tenant-primary"`` and parameter ``variables_prefix`` to ``"airflow-tenant-primary"``, your configuration file should look like this: + +.. 
code-block:: ini + + [secrets] + backend = airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend + backend_kwargs = {"connections_prefix": "airflow-tenant-primary", "variables_prefix": "airflow-tenant-primary"} + +Set-up credentials +"""""""""""""""""" + +You can configure the credentials in three ways: + +* By default, Application Default Credentials (ADC) is used to obtain credentials. +* ``gcp_key_path`` option in ``backend_kwargs`` option - allows you to configure authorizations with a service account stored in local file. +* ``gcp_keyfile_dict`` option in ``backend_kwargs`` option - allows you to configure authorizations with a service account stored in Airflow configuration. + +.. note:: + + For more information about the Application Default Credentials (ADC), see: + + * `google.auth.default `__ + * `Setting Up Authentication for Server to Server Production Applications `__ + +Managing secrets +"""""""""""""""" + +If you want to configure a connection, you need to save it as a :ref:`connection URI representation `. +Variables should be saved as plain text. + +In order to manage secrets, you can use the ``gcloud`` tool or other supported tools. For more information, take a look at: +`Managing secrets `__ in Google Cloud Documentation. + +The name of the secret must fit the following formats: + + * for variable: ``[variables_prefix][sep][variable_name]`` + * for connection: ``[connections_prefix][sep][connection_name]`` + +where: + + * ``connections_prefix`` - fixed value defined in the ``connections_prefix`` parameter in backend configuration. Default: ``airflow-connections``. + * ``variables_prefix`` - fixed value defined in the ``variables_prefix`` parameter in backend configuration. Default: ``airflow-variables``. + * ``sep`` - fixed value defined in the ``sep`` parameter in backend configuration. Default: ``-``. + +The Cloud Secrets Manager secret name should follow the pattern ``[a-zA-Z0-9-_]``. 
+ +If you have the default backend configuration and you want to create a connection with ``conn_id`` +equals ``first-connection``, you should create secret named ``airflow-connections-first-connection``. +You can do it with the gcloud tools as in the example below. + +.. code-block:: bash + + echo "mysql://example.org" | gcloud beta secrets create airflow-connections-first-connection --data-file=- + +If you have the default backend configuration and you want to create a variable named ``first-variable``, +you should create a secret named ``airflow-variables-first-variable``. You can do it with the gcloud +command as in the example below. + +.. code-block:: bash + + echo "content" | gcloud beta secrets create airflow-variables-first-variable --data-file=- diff --git a/docs/howto/secrets-backend/hashicorp-vault-secrets-backend.rst b/docs/howto/secrets-backend/hashicorp-vault-secrets-backend.rst new file mode 100644 index 0000000000000..1b25060cd4ab7 --- /dev/null +++ b/docs/howto/secrets-backend/hashicorp-vault-secrets-backend.rst @@ -0,0 +1,117 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. 
_hashicorp_vault_secrets: + +Hashicorp Vault Secrets Backend +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To enable Hashicorp vault to retrieve Airflow connection/variable, specify :py:class:`~airflow.providers.hashicorp.secrets.vault.VaultBackend` +as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. + +Here is a sample configuration: + +.. code-block:: ini + + [secrets] + backend = airflow.providers.hashicorp.secrets.vault.VaultBackend + backend_kwargs = {"connections_path": "connections", "variables_path": "variables", "mount_point": "airflow", "url": "http://127.0.0.1:8200"} + +The default KV version engine is ``2``, pass ``kv_engine_version: 1`` in ``backend_kwargs`` if you use +KV Secrets Engine Version ``1``. + +You can also set and pass values to Vault client by setting environment variables. All the +environment variables listed at https://www.vaultproject.io/docs/commands/#environment-variables are supported. + +Hence, if you set ``VAULT_ADDR`` environment variable like below, you do not need to pass ``url`` +key to ``backend_kwargs``: + +.. code-block:: bash + + export VAULT_ADDR="http://127.0.0.1:8200" + + +Storing and Retrieving Connections +"""""""""""""""""""""""""""""""""" + +If you have set ``connections_path`` as ``connections`` and ``mount_point`` as ``airflow``, then for a connection id of +``smtp_default``, you would want to store your secret as: + +.. code-block:: bash + + vault kv put airflow/connections/smtp_default conn_uri=smtps://user:host@relay.example.com:465 + +Note that the ``Key`` is ``conn_uri``, ``Value`` is ``smtps://user:host@relay.example.com:465`` and +``mount_point`` is ``airflow``. + +You can make a ``mount_point`` for ``airflow`` as follows: + +.. code-block:: bash + + vault secrets enable -path=airflow -version=2 kv + +Verify that you can get the secret from ``vault``: + +.. 
code-block:: console + + ❯ vault kv get airflow/connections/smtp_default + ====== Metadata ====== + Key Value + --- ----- + created_time 2020-03-19T19:17:51.281721Z + deletion_time n/a + destroyed false + version 1 + + ====== Data ====== + Key Value + --- ----- + conn_uri smtps://user:host@relay.example.com:465 + +The value of the Vault key must be the :ref:`connection URI representation ` +of the connection object to get connection. + +Storing and Retrieving Variables +"""""""""""""""""""""""""""""""" + +If you have set ``variables_path`` as ``variables`` and ``mount_point`` as ``airflow``, then for a variable with +``hello`` as key, you would want to store your secret as: + +.. code-block:: bash + + vault kv put airflow/variables/hello value=world + +Verify that you can get the secret from ``vault``: + +.. code-block:: console + + ❯ vault kv get airflow/variables/hello + ====== Metadata ====== + Key Value + --- ----- + created_time 2020-03-28T02:10:54.301784Z + deletion_time n/a + destroyed false + version 1 + + ==== Data ==== + Key Value + --- ----- + value world + +Note that the secret ``Key`` is ``value``, and secret ``Value`` is ``world`` and +``mount_point`` is ``airflow``. diff --git a/docs/howto/secrets-backend/index.rst b/docs/howto/secrets-backend/index.rst new file mode 100644 index 0000000000000..9c50218be52e6 --- /dev/null +++ b/docs/howto/secrets-backend/index.rst @@ -0,0 +1,86 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Secrets backend +--------------- + +.. versionadded:: 1.10.10 + +In addition to retrieving connections & variables from environment variables or the metastore database, you can enable +an alternative secrets backend to retrieve Airflow connections or Airflow variables, +such as :ref:`Google Cloud Secret Manager`, +:ref:`Hashicorp Vault Secrets` or you can :ref:`roll your own `. + +.. note:: + + The Airflow UI only shows connections and variables stored in the Metadata DB and not via any other method. + If you use an alternative secrets backend, check inside your backend to view the values of your variables and connections. + +Search path +^^^^^^^^^^^ +When looking up a connection/variable, by default Airflow will search environment variables first and metastore +database second. + +If you enable an alternative secrets backend, it will be searched first, followed by environment variables, +then metastore. This search ordering is not configurable. + +.. _secrets_backend_configuration: + +Configuration +^^^^^^^^^^^^^ + +The ``[secrets]`` section has the following options: + +.. code-block:: ini + + [secrets] + backend = + backend_kwargs = + +Set ``backend`` to the fully qualified class name of the backend you want to enable. + +You can provide ``backend_kwargs`` with json and it will be passed as kwargs to the ``__init__`` method of +your secrets backend. + +Supported backends +^^^^^^^^^^^^^^^^^^ + +.. toctree:: + :maxdepth: 1 + :glob: + + * + +.. 
_roll_your_own_secrets_backend: + +Roll your own secrets backend +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A secrets backend is a subclass of :py:class:`airflow.secrets.BaseSecretsBackend` and must implement either +:py:meth:`~airflow.secrets.BaseSecretsBackend.get_connections` or :py:meth:`~airflow.secrets.BaseSecretsBackend.get_conn_uri`. + +After writing your backend class, provide the fully qualified class name in the ``backend`` key in the ``[secrets]`` +section of ``airflow.cfg``. + +Additional arguments to your SecretsBackend can be configured in ``airflow.cfg`` by supplying a JSON string to ``backend_kwargs``, which will be passed to the ``__init__`` of your SecretsBackend. +See :ref:`Configuration ` for more details, and :ref:`SSM Parameter Store ` for an example. + +.. note:: + + If you are rolling your own secrets backend, you don't strictly need to use airflow's URI format. But + doing so makes it easier to switch between environment variables, the metastore, and your secrets backend. diff --git a/docs/howto/secrets-backend/local-filesystem-secrets-backend.rst b/docs/howto/secrets-backend/local-filesystem-secrets-backend.rst new file mode 100644 index 0000000000000..463a06b217950 --- /dev/null +++ b/docs/howto/secrets-backend/local-filesystem-secrets-backend.rst @@ -0,0 +1,145 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. 
See the License for the + specific language governing permissions and limitations + under the License. + +.. _local_filesystem_secrets: + +Local Filesystem Secrets Backend +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This backend is especially useful in the following use cases: + +* **Development**: It ensures data synchronization between all terminal windows (same as databases), + and at the same time the values are retained after database restart (same as environment variable) +* **Kubernetes**: It allows you to store secrets in `Kubernetes Secrets `__ + or you can synchronize values using the sidecar container and + `a shared volume `__ + +To use variable and connection from local file, specify :py:class:`~airflow.secrets.local_filesystem.LocalFilesystemBackend` +as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. + +Available parameters to ``backend_kwargs``: + +* ``variables_file_path``: File location with variables data. +* ``connections_file_path``: File location with connections data. + +Here is a sample configuration: + +.. code-block:: ini + + [secrets] + backend = airflow.secrets.local_filesystem.LocalFilesystemBackend + backend_kwargs = {"variables_file_path": "/files/var.json", "connections_file_path": "/files/conn.json"} + +``JSON``, ``YAML`` and ``.env`` files are supported. All parameters are optional. If the file path is not passed, +the backend returns an empty collection. + +Storing and Retrieving Connections +"""""""""""""""""""""""""""""""""" + +If you have set ``connections_file_path`` as ``/files/my_conn.json``, then the backend will read the +file ``/files/my_conn.json`` when it looks for connections. + +The file can be defined in ``JSON``, ``YAML`` or ``env`` format. Depending on the format, the data should be saved as a URL or as a connection object. +Any extra json parameters can be provided using keys like ``extra_dejson`` and ``extra``. 
+The key ``extra_dejson`` can be used to provide parameters as a JSON object, whereas the key ``extra`` can be used in case of a JSON string. +The keys ``extra`` and ``extra_dejson`` are mutually exclusive. + +The JSON file must contain an object where the key contains the connection ID and the value contains +the definition of one connection. The connection can be defined as a URI (string) or JSON object. +For a guide about defining a connection as a URI, see :ref:`generating_connection_uri`. +For a description of the connection object parameters, see :class:`~airflow.models.connection.Connection`. +The following is a sample JSON file. + +.. code-block:: json + + { + "CONN_A": "mysq://host_a", + "CONN_B": { + "conn_type": "scheme", + "host": "host", + "schema": "lschema", + "login": "Login", + "password": "None", + "port": "1234" + } + } + +The YAML file structure is similar to that of JSON: each key-value pair maps a connection ID to the definition of one or more connections. +In this format, the connection can be defined as a URI (string) or JSON object. + +.. code-block:: yaml + + CONN_A: 'mysq://host_a' + + CONN_B: + - 'mysq://host_a' + - 'mysq://host_b' + + CONN_C: + conn_type: scheme + host: host + schema: lschema + login: Login + password: None + port: 1234 + extra_dejson: + a: b + nestedblock_dict: + x: y + +You can also define connections using a ``.env`` file. Then the key is the connection ID, and +the value should describe the connection using the URI. A connection ID must not be repeated; a repeated ID will +raise an exception. The following is a sample file. + + .. 
code-block:: text + + mysql_conn_id=mysql://log:password@13.1.21.1:3306/mysqldbrd + google_custom_key=google-cloud-platform://?extra__google_cloud_platform__key_path=%2Fkeys%2Fkey.json + +Storing and Retrieving Variables +"""""""""""""""""""""""""""""""" + +If you have set ``variables_file_path`` as ``/files/my_var.json``, then the backend will read the +file ``/files/my_var.json`` when it looks for variables. + +The file can be defined in ``JSON``, ``YAML`` or ``env`` format. + +The JSON file must contain an object where the key contains the variable key and the value contains +the variable value. The following is a sample JSON file. + + .. code-block:: json + + { + "VAR_A": "some_value", + "var_b": "differnet_value" + } + +The YAML file structure is similar to that of JSON, with key containing the variable key and the value containing +the variable value. The following is a sample YAML file. + + .. code-block:: yaml + + VAR_A: some_value + VAR_B: different_value + +You can also define variable using a ``.env`` file. Then the key is the variable key, and variable should +describe the variable value. The following is a sample file. + + .. code-block:: text + + VAR_A=some_value + var_B=different_value diff --git a/docs/howto/set-config.rst b/docs/howto/set-config.rst index 9fc12b66af773..0d3568d4c1dd6 100644 --- a/docs/howto/set-config.rst +++ b/docs/howto/set-config.rst @@ -98,5 +98,15 @@ The universal order of precedence for all configuration options is as follows: #. secret key in ``airflow.cfg`` #. Airflow's built in defaults +You can check the current configuration with the ``airflow config list`` command. + +If you only want to see the value for one option, you can use ``airflow config get-value`` command as in +the example below. + +.. code-block:: bash + + $ airflow config get-value core executor + SequentialExecutor + .. 
note:: For more information on configuration options, see :doc:`../configurations-ref` diff --git a/docs/howto/use-alternative-secrets-backend.rst b/docs/howto/use-alternative-secrets-backend.rst deleted file mode 100644 index 6b4c0c95198a4..0000000000000 --- a/docs/howto/use-alternative-secrets-backend.rst +++ /dev/null @@ -1,458 +0,0 @@ - .. Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - .. http://www.apache.org/licenses/LICENSE-2.0 - - .. Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. - - -Alternative secrets backend ---------------------------- - -.. versionadded:: 1.10.10 - -In addition to retrieving connections & variables from environment variables or the metastore database, you can enable -an alternative secrets backend to retrieve Airflow connections or Airflow variables, -such as :ref:`AWS SSM Parameter Store `, -:ref:`Hashicorp Vault Secrets` or you can :ref:`roll your own `. - -.. note:: - - The Airflow UI only shows connections and variables stored in the Metadata DB and not via any other method. - If you use an alternative secrets backend, check inside your backend to view the values of your variables and connections. - -Search path -^^^^^^^^^^^ -When looking up a connection/variable, by default Airflow will search environment variables first and metastore -database second. 
- -If you enable an alternative secrets backend, it will be searched first, followed by environment variables, -then metastore. This search ordering is not configurable. - -.. _secrets_backend_configuration: - -Configuration -^^^^^^^^^^^^^ - -The ``[secrets]`` section has the following options: - -.. code-block:: ini - - [secrets] - backend = - backend_kwargs = - -Set ``backend`` to the fully qualified class name of the backend you want to enable. - -You can provide ``backend_kwargs`` with json and it will be passed as kwargs to the ``__init__`` method of -your secrets backend. - -.. _local_filesystem_secrets: - -Local Filesystem Secrets Backend -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This backend is especially useful in the following use cases: - -* **Development**: It ensures data synchronization between all terminal windows (same as databases), - and at the same time the values are retained after database restart (same as environment variable) -* **Kubernetes**: It allows you to store secrets in `Kubernetes Secrets `__ - or you can synchronize values using the sidecar container and - `a shared volume `__ - -To use variable and connection from local file, specify :py:class:`~airflow.secrets.local_filesystem.LocalFilesystemBackend` -as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. - -Available parameters to ``backend_kwargs``: - -* ``variables_file_path``: File location with variables data. -* ``connections_file_path``: File location with connections data. - -Here is a sample configuration: - -.. code-block:: ini - - [secrets] - backend = airflow.secrets.local_filesystem.LocalFilesystemBackend - backend_kwargs = {"variables_file_path": "/files/var.json", "connections_file_path": "/files/conn.json"} - -``JSON``, ``YAML`` and ``.env`` files are supported. All parameters are optional. If the file path is not passed, -the backend returns an empty collection. 
- -Storing and Retrieving Connections -"""""""""""""""""""""""""""""""""" - -If you have set ``connections_file_path`` as ``/files/my_conn.json``, then the backend will read the -file ``/files/my_conn.json`` when it looks for connections. - -The file can be defined in ``JSON``, ``YAML`` or ``env`` format. Depending on the format, the data should be saved as a URL or as a connection object. -Any extra json parameters can be provided using keys like ``extra_dejson`` and ``extra``. -The key ``extra_dejson`` can be used to provide parameters as JSON object where as the key ``extra`` can be used in case of a JSON string. -The keys ``extra`` and ``extra_dejson`` are mutually exclusive. - -The JSON file must contain an object where the key contains the connection ID and the value contains -the definitions of one or more connections. In this format, the connection can be defined as a URI (string) or JSON object. -The following is a sample JSON file. - -.. code-block:: json - - { - "CONN_A": "mysq://host_a", - "CONN_B": [ - "mysq://host_a", - "mysq://host_a" - ], - "CONN_C": { - "conn_type": "scheme", - "host": "host", - "schema": "lschema", - "login": "Login", - "password": "None", - "port": "1234" - } - } - -The YAML file structure is similar to that of a JSON. The key-value pair of connection ID and the definitions of one or more connections. -In this format, the connection can be defined as a URI (string) or JSON object. - -.. code-block:: yaml - - CONN_A: 'mysq://host_a' - - CONN_B: - - 'mysq://host_a' - - 'mysq://host_b' - - CONN_C: - conn_type: scheme - host: host - schema: lschema - login: Login - password: None - port: 1234 - extra_dejson: - a: b - nestedblock_dict: - x: y - -You can also define connections using a ``.env`` file. Then the key is the connection ID, and -the value should describe the connection using the URI. If the connection ID is repeated, all values will -be returned. The following is a sample file. - - .. 
code-block:: text - - mysql_conn_id=mysql://log:password@13.1.21.1:3306/mysqldbrd - google_custom_key=google-cloud-platform://?extra__google_cloud_platform__key_path=%2Fkeys%2Fkey.json - -Storing and Retrieving Variables -"""""""""""""""""""""""""""""""" - -If you have set ``variables_file_path`` as ``/files/my_var.json``, then the backend will read the -file ``/files/my_var.json`` when it looks for variables. - -The file can be defined in ``JSON``, ``YAML`` or ``env`` format. - -The JSON file must contain an object where the key contains the variable key and the value contains -the variable value. The following is a sample JSON file. - - .. code-block:: json - - { - "VAR_A": "some_value", - "var_b": "differnet_value" - } - -The YAML file structure is similar to that of JSON, with key containing the variable key and the value containing -the variable value. The following is a sample YAML file. - - .. code-block:: yaml - - VAR_A: some_value - VAR_B: different_value - -You can also define variable using a ``.env`` file. Then the key is the variable key, and variable should -describe the variable value. The following is a sample file. - - .. code-block:: text - - VAR_A=some_value - var_B=different_value - -.. _ssm_parameter_store_secrets: - -AWS SSM Parameter Store Secrets Backend -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To enable SSM parameter store, specify :py:class:`~airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend` -as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. - -Here is a sample configuration: - -.. 
code-block:: ini - - [secrets] - backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend - backend_kwargs = {"connections_prefix": "/airflow/connections", "variables_prefix": "/airflow/variables", "profile_name": "default"} - -Storing and Retrieving Connections -"""""""""""""""""""""""""""""""""" - -If you have set ``connections_prefix`` as ``/airflow/connections``, then for a connection id of ``smtp_default``, -you would want to store your connection at ``/airflow/connections/smtp_default``. - -Optionally you can supply a profile name to reference aws profile, e.g. defined in ``~/.aws/config``. - -The value of the SSM parameter must be the :ref:`connection URI representation ` -of the connection object. - -Storing and Retrieving Variables -"""""""""""""""""""""""""""""""" - -If you have set ``variables_prefix`` as ``/airflow/variables``, then for an Variable key of ``hello``, -you would want to store your Variable at ``/airflow/variables/hello``. - -Optionally you can supply a profile name to reference aws profile, e.g. defined in ``~/.aws/config``. - -AWS Secrets Manager Backend -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To enable Secrets Manager, specify :py:class:`~airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend` -as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. - -Here is a sample configuration: - -.. code-block:: ini - - [secrets] - backend = airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend - backend_kwargs = {"connections_prefix": "airflow/connections", "variables_prefix": "airflow/variables", "profile_name": "default"} - -To authenticate you can either supply a profile name to reference aws profile, e.g. defined in ``~/.aws/config`` or set -environment variables like ``AWS_ACCESS_KEY_ID``, ``AWS_SECRET_ACCESS_KEY``. 
- - -Storing and Retrieving Connections -"""""""""""""""""""""""""""""""""" - -If you have set ``connections_prefix`` as ``airflow/connections``, then for a connection id of ``smtp_default``, -you would want to store your connection at ``airflow/connections/smtp_default``. - -Example: - -.. code-block:: bash - - aws secretsmanager put-secret-value \ - --secret-id airflow/connections/smtp_default \ - --secret-string "smtps://user:host@relay.example.com:465" - -Verify that you can get the secret: - -.. code-block:: console - - ❯ aws secretsmanager get-secret-value --secret-id airflow/connections/smtp_default - { - "ARN": "arn:aws:secretsmanager:us-east-2:314524341751:secret:airflow/connections/smtp_default-7meuul", - "Name": "airflow/connections/smtp_default", - "VersionId": "34f90eff-ea21-455a-9c8f-5ee74b21be672", - "SecretString": "smtps://user:host@relay.example.com:465", - "VersionStages": [ - "AWSCURRENT" - ], - "CreatedDate": "2020-04-08T02:10:35.132000+01:00" - } - -The value of the secret must be the :ref:`connection URI representation ` -of the connection object. - -Storing and Retrieving Variables -"""""""""""""""""""""""""""""""" - -If you have set ``variables_prefix`` as ``airflow/variables``, then for an Variable key of ``hello``, -you would want to store your Variable at ``airflow/variables/hello``. - - -.. _hashicorp_vault_secrets: - -Hashicorp Vault Secrets Backend -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To enable Hashicorp vault to retrieve Airflow connection/variable, specify :py:class:`~airflow.providers.hashicorp.secrets.vault.VaultBackend` -as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. - -Here is a sample configuration: - -.. 
code-block:: ini - - [secrets] - backend = airflow.providers.hashicorp.secrets.vault.VaultBackend - backend_kwargs = {"connections_path": "connections", "variables_path": "variables", "mount_point": "airflow", "url": "http://127.0.0.1:8200"} - -The default KV version engine is ``2``, pass ``kv_engine_version: 1`` in ``backend_kwargs`` if you use -KV Secrets Engine Version ``1``. - -You can also set and pass values to Vault client by setting environment variables. All the -environment variables listed at https://www.vaultproject.io/docs/commands/#environment-variables are supported. - -Hence, if you set ``VAULT_ADDR`` environment variable like below, you do not need to pass ``url`` -key to ``backend_kwargs``: - -.. code-block:: bash - - export VAULT_ADDR="http://127.0.0.1:8200" - - -Storing and Retrieving Connections -"""""""""""""""""""""""""""""""""" - -If you have set ``connections_path`` as ``connections`` and ``mount_point`` as ``airflow``, then for a connection id of -``smtp_default``, you would want to store your secret as: - -.. code-block:: bash - - vault kv put airflow/connections/smtp_default conn_uri=smtps://user:host@relay.example.com:465 - -Note that the ``Key`` is ``conn_uri``, ``Value`` is ``postgresql://airflow:airflow@host:5432/airflow`` and -``mount_point`` is ``airflow``. - -You can make a ``mount_point`` for ``airflow`` as follows: - -.. code-block:: bash - - vault secrets enable -path=airflow -version=2 kv - -Verify that you can get the secret from ``vault``: - -.. code-block:: console - - ❯ vault kv get airflow/connections/smtp_default - ====== Metadata ====== - Key Value - --- ----- - created_time 2020-03-19T19:17:51.281721Z - deletion_time n/a - destroyed false - version 1 - - ====== Data ====== - Key Value - --- ----- - conn_uri smtps://user:host@relay.example.com:465 - -The value of the Vault key must be the :ref:`connection URI representation ` -of the connection object to get connection. 
- -Storing and Retrieving Variables -"""""""""""""""""""""""""""""""" - -If you have set ``variables_path`` as ``variables`` and ``mount_point`` as ``airflow``, then for a variable with -``hello`` as key, you would want to store your secret as: - -.. code-block:: bash - - vault kv put airflow/variables/hello value=world - -Verify that you can get the secret from ``vault``: - -.. code-block:: console - - ❯ vault kv get airflow/variables/hello - ====== Metadata ====== - Key Value - --- ----- - created_time 2020-03-28T02:10:54.301784Z - deletion_time n/a - destroyed false - version 1 - - ==== Data ==== - Key Value - --- ----- - value world - -Note that the secret ``Key`` is ``value``, and secret ``Value`` is ``world`` and -``mount_point`` is ``airflow``. - - -.. _secret_manager_backend: - -GCP Secret Manager Backend -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To enable GCP Secrets Manager to retrieve connection/variables, specify :py:class:`~airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend` -as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. - -Available parameters to ``backend_kwargs``: - -* ``connections_prefix``: Specifies the prefix of the secret to read to get Connections. -* ``variables_prefix``: Specifies the prefix of the secret to read to get Variables. -* ``gcp_key_path``: Path to GCP Credential JSON file -* ``gcp_scopes``: Comma-separated string containing GCP scopes -* ``sep``: separator used to concatenate connections_prefix and conn_id. Default: "-" - -Note: The full GCP Secrets Manager secret id should follow the pattern "[a-zA-Z0-9-_]". - -Here is a sample configuration if you want to just retrieve connections: - -.. code-block:: ini - - [secrets] - backend = airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend - backend_kwargs = {"connections_prefix": "airflow-connections", "sep": "-"} - -Here is a sample configuration if you want to just retrieve variables: - -.. 
code-block:: ini - - [secrets] - backend = airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend - backend_kwargs = {"variables_prefix": "airflow-variables", "sep": "-"} - -and if you want to retrieve both Variables and connections use the following sample config: - -.. code-block:: ini - - [secrets] - backend = airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend - backend_kwargs = {"connections_prefix": "airflow-connections", "variables_prefix": "airflow-variables", "sep": "-"} - - -When ``gcp_key_path`` is not provided, it will use the Application Default Credentials (ADC) to obtain credentials. - -.. note:: - - For more information about the Application Default Credentials (ADC), see: - - * `google.auth.default `__ - * `Setting Up Authentication for Server to Server Production Applications `__ - -The value of the Secrets Manager secret id must be the :ref:`connection URI representation ` -of the connection object. - -.. _roll_your_own_secrets_backend: - -Roll your own secrets backend -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -A secrets backend is a subclass of :py:class:`airflow.secrets.BaseSecretsBackend` and must implement either -:py:meth:`~airflow.secrets.BaseSecretsBackend.get_connections` or :py:meth:`~airflow.secrets.BaseSecretsBackend.get_conn_uri`. - -After writing your backend class, provide the fully qualified class name in the ``backend`` key in the ``[secrets]`` -section of ``airflow.cfg``. - -Additional arguments to your SecretsBackend can be configured in ``airflow.cfg`` by supplying a JSON string to ``backend_kwargs``, which will be passed to the ``__init__`` of your SecretsBackend. -See :ref:`Configuration ` for more details, and :ref:`SSM Parameter Store ` for an example. - -.. note:: - - If you are rolling your own secrets backend, you don't strictly need to use airflow's URI format. But - doing so makes it easier to switch between environment variables, the metastore, and your secrets backend. 
diff --git a/docs/index.rst b/docs/index.rst index 7bc223dccc972..41d014fd42f1e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -96,7 +96,6 @@ Content lineage dag-serialization Using the REST API - REST API Migration Guide changelog best-practices faq diff --git a/docs/integration.rst b/docs/integration.rst index 4e8568e09f600..a2002c86099c4 100644 --- a/docs/integration.rst +++ b/docs/integration.rst @@ -27,7 +27,7 @@ Airflow has a mechanism that allows you to expand its functionality and integrat * :doc:`Authentication backends ` * :doc:`Logging ` * :doc:`Tracking systems ` -* :doc:`Secrets backends ` +* :doc:`Secrets backends ` * :doc:`Email backends ` It also has integration with :doc:`Sentry ` service for error tracking. Other applications can also integrate using diff --git a/docs/metrics.rst b/docs/metrics.rst index c31f308f749a4..a91158bad37cc 100644 --- a/docs/metrics.rst +++ b/docs/metrics.rst @@ -88,6 +88,7 @@ Name Description ``sla_email_notification_failure`` Number of failed SLA miss email notification attempts ``ti.start..`` Number of started task in a given dag. Similar to _start but for task ``ti.finish...`` Number of completed task in a given dag. Similar to _end but for task +``dag.callback_exceptions`` Number of exceptions raised from DAG callbacks. When this happens, it means DAG callback is not working. 
======================================= ================================================================ Gauges diff --git a/docs/operators-and-hooks-ref.rst b/docs/operators-and-hooks-ref.rst index 17ed2beea94eb..b14f351d8dcf5 100644 --- a/docs/operators-and-hooks-ref.rst +++ b/docs/operators-and-hooks-ref.rst @@ -57,12 +57,16 @@ Fundamentals * - :mod:`airflow.operators.branch_operator` - + * - :mod:`airflow.operators.dagrun_operator` - * - :mod:`airflow.operators.dummy_operator` - + * - :mod:`airflow.operators.email` + - + * - :mod:`airflow.operators.generic_transfer` - @@ -472,9 +476,10 @@ These integrations allow you to perform various operations within the Amazon Web :mod:`airflow.providers.amazon.aws.operators.sagemaker_endpoint_config`, :mod:`airflow.providers.amazon.aws.operators.sagemaker_endpoint`, :mod:`airflow.providers.amazon.aws.operators.sagemaker_model`, + :mod:`airflow.providers.amazon.aws.operators.sagemaker_processing`, :mod:`airflow.providers.amazon.aws.operators.sagemaker_training`, :mod:`airflow.providers.amazon.aws.operators.sagemaker_transform`, - :mod:`airflow.providers.amazon.aws.operators.sagemaker_tuning` + :mod:`airflow.providers.amazon.aws.operators.sagemaker_tuning`, - :mod:`airflow.providers.amazon.aws.sensors.sagemaker_base`, :mod:`airflow.providers.amazon.aws.sensors.sagemaker_endpoint`, :mod:`airflow.providers.amazon.aws.sensors.sagemaker_training`, @@ -502,7 +507,8 @@ These integrations allow you to perform various operations within the Amazon Web :mod:`airflow.providers.amazon.aws.operators.s3_delete_objects`, :mod:`airflow.providers.amazon.aws.operators.s3_list` - :mod:`airflow.providers.amazon.aws.sensors.s3_key`, - :mod:`airflow.providers.amazon.aws.sensors.s3_prefix` + :mod:`airflow.providers.amazon.aws.sensors.s3_prefix`, + :mod:`airflow.providers.amazon.aws.sensors.s3_keys_unchanged` * - `AWS Step Functions `__ - @@ -721,7 +727,7 @@ These integrations allow you to perform various operations within the Google Clo - * - 
`Datastore `__ - - + - :doc:`How to use ` - :mod:`airflow.providers.google.cloud.hooks.datastore` - :mod:`airflow.providers.google.cloud.operators.datastore` - @@ -1598,12 +1604,6 @@ communication protocols or interface. - :mod:`airflow.providers.ssh.operators.ssh` - - * - `Simple Mail Transfer Protocol (SMTP) `__ - - - - - - :mod:`airflow.providers.email.operators.email` - - - * - `Windows Remote Management (WinRM) `__ - - :mod:`airflow.providers.microsoft.winrm.hooks.winrm` diff --git a/docs/plugins.rst b/docs/plugins.rst index a361b0ba22b6d..8aad28661e71a 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -241,7 +241,6 @@ definitions in Airflow. flask_blueprints = [bp] appbuilder_views = [v_appbuilder_package] appbuilder_menu_items = [appbuilder_mitem] - stat_name_handler = staticmethod(stat_name_dummy_handler) global_operator_extra_links = [GoogleLink(),] operator_extra_links = [S3LogLink(), ] diff --git a/docs/redirects.txt b/docs/redirects.txt index 40eb27b342357..fa69abc8348e2 100644 --- a/docs/redirects.txt +++ b/docs/redirects.txt @@ -69,3 +69,4 @@ howto/operator/google/firebase/index.rst howto/operator/google/index.rst # Other redirects howto/operator/http/http.rst howto/operator/http.rst docs/howto/operator/http/index.rst howto/operator/http.rst +docs/howto/use-alternative-secrets-backend.rst howto/altenative-secrets-backends/index.rst diff --git a/docs/rest-api-ref.rst b/docs/rest-api-ref.rst index 8719965b721d0..11f93f81cd544 100644 --- a/docs/rest-api-ref.rst +++ b/docs/rest-api-ref.rst @@ -24,7 +24,7 @@ available at ``/api/experimental/``. .. warning:: This REST API is deprecated since version 2.0. Please consider using :doc:`the stable REST API `. - For more information on migration, see: :doc:`stable-rest-api/migration`. 
+ For more information on migration, see `UPDATING.md `_ Endpoints --------- diff --git a/docs/stable-rest-api/migration.rst b/docs/stable-rest-api/migration.rst deleted file mode 100644 index cd690d58c6ef5..0000000000000 --- a/docs/stable-rest-api/migration.rst +++ /dev/null @@ -1,181 +0,0 @@ - .. Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - .. http://www.apache.org/licenses/LICENSE-2.0 - - .. Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. - -Migration Guide from Experimental API to Stable API v1 -====================================================== -This article provides guidelines for migrating from experimental REST API to the -stable REST API. - -Introduction ------------- -If your application is still using the experimental API, it is important to -consider migrating to the stable API so that your application continues to -work. - -The stable API exposes many endpoints available through the webserver. Here are the -differences between the two endpoints that will help you migrate from the -experimental REST API to the stable REST API. - -Base Endpoint -^^^^^^^^^^^^^ -The base endpoint for the stable API v1 is ``/api/v1/``. You must change the -experimental base endpoint from ``/api/experimental/`` to ``/api/v1/``. 
- -Create a dag_run from a given dag_id -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The endpoint for creating a dag_run from a given dag_id has changed from - -.. http:post:: /api/experimental/dags//dag_runs - -to - -.. http:post:: /api/v1/dags/{dag_id}/dagRuns - - -List dag_runs from a specific DAG ID -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The endpoint to get the list of dag_runs for a specific dag_id has changed from - -.. http:get:: /api/experimental/dags//dag_runs - -to - -.. http:get:: /api/v1/dags/{dag_id}/dagRuns - -This endpoint also allows you to filter dag_runs with parameters such as ``start_date``, ``end_date``, ``execution_date`` etc in the query string. -Therefore the operation previously performed by this endpoint - -.. http:get:: /api/experimental/dags//dag_runs/ - -can now be handled with filter parameters in the query string. - -Health endpoint -^^^^^^^^^^^^^^^ -The operation previously performed in the experimental REST API endpoint to check -the health status has changed from - -.. http:get:: /api/experimental/test - -to - -.. http:get:: /api/v1/health - -Task information endpoint -^^^^^^^^^^^^^^^^^^^^^^^^^ -The endpoint for getting task information has changed from - -.. http:get:: /api/experimental/dags//tasks/ - -to - -.. http:get:: /api/v1//dags/{dag_id}/tasks/{task_id} - -Task Instance -^^^^^^^^^^^^^ -The endpoint for getting task instance's public instance variable -has changed from - -.. http:get:: /api/experimental/dags//dag_runs//tasks/ - -to - -.. http:get:: /api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id} - - -DAG -^^^ -The endpoint for pausing a dag has changed from - -.. http:get:: /api/experimental/dags//paused/ - -to - -.. http:patch:: /api/v1/dags/{dag_id} - -while getting information about the paused state of a dag has changed from - -.. http:get:: /api/experimental/dags//paused - -to - -.. 
http:get:: /api/v1/dags/{dag_id} - - -Latest DAG Runs -^^^^^^^^^^^^^^^ -The endpoint for getting the latest DagRun for each DAG formatted for the UI -have changed from - -.. http:get:: /api/experimental/latest_runs - -to - -.. http:get:: /api/v1/dags/{dag_id}/dagRuns - -Getting information about latest runs can be accomplished with the help of -filters in the query string of this endpoint. Please check the Stable API -reference documentation for more information - -Get all Pools -^^^^^^^^^^^^^ -The endpoint for getting all pools has changed from - -.. http:get:: /api/experimental/pools - -to - -.. http:get:: /api/v1/pools - -Get pool by a given name -^^^^^^^^^^^^^^^^^^^^^^^^ -The endpoint to get pool by a given name has changed from - -.. http:get:: /api/experimental/pools/ - -to - -.. http:get:: /api/v1/pools/{pool_name} - -Create a Pool -^^^^^^^^^^^^^ -The endpoint for creating a pool has changed from - -.. http:post:: /api/experimental/pools - -to - -.. http:post:: /api/v1/pools - -Delete a Pool -^^^^^^^^^^^^^ -The endpoint for deleting a pool has changed from - -.. http:delete:: /api/experimental/pools/ - -to - -.. http:delete:: /api/v1/pools/{pool_name} - -DAG Lineage -^^^^^^^^^^^ -The endpoint for returning the lineage of a dag have changed from - -.. http:get:: /api/experimental/lineage/// - -to - -.. 
http:get:: /api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries diff --git a/docs/timezone.rst b/docs/timezone.rst index c43a63b4d5645..8106c3f1d46db 100644 --- a/docs/timezone.rst +++ b/docs/timezone.rst @@ -59,7 +59,7 @@ The users' selected timezone is stored in LocalStorage so is a pre-browser setti Concepts -------- -Naïve and aware datetime objects +Naive and aware datetime objects '''''''''''''''''''''''''''''''' Python’s datetime.datetime objects have a tzinfo attribute that can be used to store time zone information, diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes_tests/test_kubernetes_pod_operator.py index 26513b79d32b8..d2946cc555927 100644 --- a/kubernetes_tests/test_kubernetes_pod_operator.py +++ b/kubernetes_tests/test_kubernetes_pod_operator.py @@ -61,6 +61,10 @@ def create_context(task): # noinspection DuplicatedCode,PyUnusedLocal class TestKubernetesPodOperatorSystem(unittest.TestCase): + def get_current_task_name(self): + # reverse test name to make pod name unique (it has limited length) + return "_" + unittest.TestCase.id(self).replace(".", "_")[::-1] + def setUp(self): self.maxDiff = None # pylint: disable=invalid-name self.api_client = ApiClient() @@ -76,7 +80,7 @@ def setUp(self): 'airflow_version': airflow_version.replace('+', '-'), 'execution_date': '2016-01-01T0100000100-a2f50a31f', 'dag_id': 'dag', - 'task_id': 'task', + 'task_id': ANY, 'try_number': '1'}, }, 'spec': { @@ -120,7 +124,7 @@ def test_do_xcom_push_defaults_false(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, config_file=new_config_path, @@ -139,7 +143,7 @@ def test_config_path_move(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test1", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, config_file=new_config_path, @@ -157,7 +161,7 @@ def 
test_working_pod(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, ) @@ -175,7 +179,7 @@ def test_delete_operator_pod(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, is_delete_operator_pod=True, @@ -194,7 +198,7 @@ def test_pod_hostnetwork(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, hostnetwork=True, @@ -215,7 +219,7 @@ def test_pod_dnspolicy(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, hostnetwork=True, @@ -238,7 +242,7 @@ def test_pod_schedulername(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, schedulername=scheduler_name @@ -260,7 +264,7 @@ def test_pod_node_selectors(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, node_selectors=node_selectors, @@ -287,7 +291,7 @@ def test_pod_resources(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, resources=resources, @@ -335,7 +339,7 @@ def test_pod_affinity(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, affinity=affinity, @@ -356,7 +360,7 @@ def test_port(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + 
task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, ports=[port], @@ -395,7 +399,7 @@ def test_volume_mount(self): volume_mounts=[volume_mount], volumes=[volume], name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, ) @@ -430,7 +434,7 @@ def test_run_as_user_root(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, security_context=security_context, @@ -455,7 +459,7 @@ def test_run_as_user_non_root(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, security_context=security_context, @@ -480,7 +484,7 @@ def test_fs_group(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, security_context=security_context, @@ -500,7 +504,7 @@ def test_faulty_image(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, startup_timeout_seconds=5, @@ -521,7 +525,7 @@ def test_faulty_service_account(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, startup_timeout_seconds=5, @@ -546,7 +550,7 @@ def test_pod_failure(self): arguments=bad_internal_command, labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, ) @@ -567,7 +571,7 @@ def test_xcom_push(self): arguments=args, labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=True, ) @@ -599,7 
+603,7 @@ def test_envs_from_configmaps(self, mock_client, mock_monitor, mock_start): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, configmaps=[configmap], @@ -632,7 +636,7 @@ def test_envs_from_secrets(self, mock_client, monitor_mock, start_mock): secrets=secrets, labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, ) @@ -707,7 +711,7 @@ def test_init_container(self): arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), volumes=[volume], init_containers=[init_container], in_cluster=False, @@ -735,7 +739,7 @@ def test_pod_template_file( start_mock): # pylint: disable=unused-argument from airflow.utils.state import State k = KubernetesPodOperator( - task_id='task', + task_id="task" + self.get_current_task_name(), pod_template_file='tests/kubernetes/pod.yaml', do_xcom_push=True ) @@ -811,7 +815,7 @@ def test_pod_priority_class_name( arguments=["echo 10"], labels={"foo": "bar"}, name="test", - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, priority_class_name=priority_class_name, @@ -834,7 +838,7 @@ def test_pod_name(self): arguments=["echo 10"], labels={"foo": "bar"}, name=pod_name_too_long, - task_id="task", + task_id="task" + self.get_current_task_name(), in_cluster=False, do_xcom_push=False, ) diff --git a/pylintrc b/pylintrc index 43f9658af0bf9..c9c7996bf0b47 100644 --- a/pylintrc +++ b/pylintrc @@ -38,7 +38,7 @@ limit-inference-results=100 # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. 
-load-plugins=tests.airflow_pylint.do_not_use_asserts +load-plugins=tests.airflow_pylint.do_not_use_asserts,tests.airflow_pylint.disable_checks_for_tests # Pickle collected data for later comparisons. persistent=yes diff --git a/requirements/requirements-python3.8.txt b/requirements/requirements-python3.8.txt index 3eeebe0039690..864fbcd0c0134 100644 --- a/requirements/requirements-python3.8.txt +++ b/requirements/requirements-python3.8.txt @@ -37,7 +37,7 @@ adal==1.2.4 aiohttp==3.6.2 alabaster==0.7.12 alembic==1.4.2 -amqp==2.6.0 +amqp==2.6.1 analytics-python==1.2.9 ansiwrap==0.8.4 apipkg==1.5 @@ -80,7 +80,7 @@ cached-property==1.5.1 cachetools==4.1.1 cassandra-driver==3.20.2 cattrs==1.0.0 -celery==4.4.6 +celery==4.4.7 certifi==2020.6.20 cffi==1.14.0 cfgv==3.1.0 diff --git a/scripts/ci/backport_packages/ci_prepare_and_test_backport_packages.sh b/scripts/ci/backport_packages/ci_prepare_and_test_backport_packages.sh index 7982606de5f09..d756dac169b32 100755 --- a/scripts/ci/backport_packages/ci_prepare_and_test_backport_packages.sh +++ b/scripts/ci/backport_packages/ci_prepare_and_test_backport_packages.sh @@ -25,15 +25,9 @@ export PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:-3.6} "${SCRIPTS_CI_DIR}/backport_packages/ci_test_backport_packages_install_separately.sh" "${SCRIPTS_CI_DIR}/backport_packages/ci_test_backport_packages_import_all_classes.sh" -cd "${AIRFLOW_SOURCES}/backport_packages" || exit 1 - DUMP_FILE="/tmp/airflow_provider_packages_$(date +"%Y%m%d-%H%M%S").tar.gz" cd "${AIRFLOW_SOURCES}/dist" || exit 1 tar -cvzf "${DUMP_FILE}" . 
echo "Packages are in dist and also tar-gzipped in ${DUMP_FILE}" - -if [[ "${CI:=false}" == "true" ]]; then - curl -F "file=@${DUMP_FILE}" https://file.io -fi diff --git a/scripts/ci/backport_packages/ci_test_backport_packages_import_all_classes.sh b/scripts/ci/backport_packages/ci_test_backport_packages_import_all_classes.sh index 0ba120f453f2b..e1d60c2accdb6 100755 --- a/scripts/ci/backport_packages/ci_test_backport_packages_import_all_classes.sh +++ b/scripts/ci/backport_packages/ci_test_backport_packages_import_all_classes.sh @@ -29,16 +29,6 @@ function run_test_package_import_all_classes() { -v "${AIRFLOW_SOURCES}/airflow/__init__.py:/airflow_sources/airflow/__init__.py:cached" \ -v "${AIRFLOW_SOURCES}/airflow/version.py:/airflow_sources/airflow/version.py:cached" \ -v "${AIRFLOW_SOURCES}/backport_packages/import_all_provider_classes.py:/import_all_provider_classes.py:cached" \ - --env PYTHONDONTWRITEBYTECODE \ - --env INSTALL_AIRFLOW_VERSION \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/ci/in_container/run_test_package_import_all_classes.sh" \ | tee -a "${OUTPUT_LOG}" diff --git a/scripts/ci/backport_packages/ci_test_backport_packages_install_separately.sh b/scripts/ci/backport_packages/ci_test_backport_packages_install_separately.sh index 3ed23c72a5a84..98ec6dddf3f13 100755 --- a/scripts/ci/backport_packages/ci_test_backport_packages_install_separately.sh +++ b/scripts/ci/backport_packages/ci_test_backport_packages_install_separately.sh @@ -24,20 +24,9 @@ function run_test_package_installation_separately() { docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ -v "${AIRFLOW_SOURCES}/dist:/dist:cached" \ - --env PYTHONDONTWRITEBYTECODE \ - --env INSTALL_AIRFLOW_VERSION \ - --env VERBOSE \ 
- --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/ci/in_container/run_test_package_installation_separately.sh" \ | tee -a "${OUTPUT_LOG}" - } get_environment_for_builds_on_ci diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml index 0feea60ce0628..7f54b206b77dc 100644 --- a/scripts/ci/docker-compose/base.yml +++ b/scripts/ci/docker-compose/base.yml @@ -39,12 +39,22 @@ services: - RUN_INTEGRATION_TESTS - ONLY_RUN_LONG_RUNNING_TESTS - ONLY_RUN_QUARANTINED_TESTS + - GITHUB_TOKEN + - GITHUB_REPOSITORY + - ISSUE_ID + - NUM_RUNS - BREEZE - INSTALL_AIRFLOW_VERSION - DB_RESET - ENABLED_SYSTEMS - RUN_SYSTEM_TESTS - PYTHON_MAJOR_MINOR_VERSION + - HOST_USER_ID + - HOST_GROUP_ID + - HOST_HOME=${HOME} + - HOST_AIRFLOW_SOURCES=${AIRFLOW_SOURCES} + - HOST_OS + - PYTHONDONTWRITEBYTECODE volumes: # Pass docker to inside of the container so that Kind and Moto tests can use it. - /var/run/docker.sock:/var/run/docker.sock diff --git a/scripts/ci/pre_commit/pre_commit_pylint_tests.sh b/scripts/ci/docker-compose/ci.yml old mode 100755 new mode 100644 similarity index 74% rename from scripts/ci/pre_commit/pre_commit_pylint_tests.sh rename to scripts/ci/docker-compose/ci.yml index 3fc196500867f..1d0c6a4d6b27f --- a/scripts/ci/pre_commit/pre_commit_pylint_tests.sh +++ b/scripts/ci/docker-compose/ci.yml @@ -1,4 +1,3 @@ -#!/usr/bin/env bash # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -15,8 +14,15 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-export FORCE_ANSWER_TO_QUESTIONS=${FORCE_ANSWER_TO_QUESTIONS:="quit"} -export REMEMBER_LAST_ANSWER="true" - -# shellcheck source=scripts/ci/static_checks/ci_pylint_tests.sh -. "$( dirname "${BASH_SOURCE[0]}" )/../static_checks/ci_pylint_tests.sh" "${@}" +--- +version: "2.2" +services: + airflow: + environment: + - GITHUB_HEAD_REF + - GITHUB_REF + - GITHUB_ACTIONS + - GITHUB_SHA + - GITHUB_REPOSITORY + - GITHUB_RUN_ID + - GITHUB_TOKEN diff --git a/scripts/ci/cancel/get_workflow_id.sh b/scripts/ci/docker-compose/files.yml old mode 100755 new mode 100644 similarity index 65% rename from scripts/ci/cancel/get_workflow_id.sh rename to scripts/ci/docker-compose/files.yml index 4fa6187c3b105..5625ca6cfbf3a --- a/scripts/ci/cancel/get_workflow_id.sh +++ b/scripts/ci/docker-compose/files.yml @@ -1,4 +1,3 @@ -#!/usr/bin/env bash # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -15,10 +14,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -set -euo pipefail -echo "Getting workflow id for ${WORKFLOW}. 
Github Repo: ${GITHUB_REPOSITORY}" -URL="https://api.github.com/repos/${GITHUB_REPOSITORY}/actions/workflows/${WORKFLOW}.yml" -echo "Calling URL: ${URL}" -WORKFLOW_ID=$(curl "Authorization: token ${GITHUB_TOKEN}" "${URL}" | jq '.id') -echo "Workflow id for ${WORKFLOW}: ${WORKFLOW_ID}" -echo "::set-env name=WORKFLOW_ID::${WORKFLOW_ID}" +--- +version: "2.2" +services: + airflow: + volumes: + - ../../../files:/files:cached diff --git a/scripts/ci/docker-compose/local-prod.yml b/scripts/ci/docker-compose/local-prod.yml index 10db4e7d907dc..dce04f590e871 100644 --- a/scripts/ci/docker-compose/local-prod.yml +++ b/scripts/ci/docker-compose/local-prod.yml @@ -29,7 +29,6 @@ services: - ../../../.github:/opt/airflow/.github:cached - ../../../.inputrc:/root/.inputrc:cached - ../../../.kube:/root/.kube:cached - - ../../../files:/files:cached - ../../../dist:/dist:cached - ../../../scripts/prod/entrypoint_prod.sh:/entrypoint:cached - ../../../setup.cfg:/opt/airflow/setup.cfg:cached diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml index 3c692c861da0f..df49fb0385000 100644 --- a/scripts/ci/docker-compose/local.yml +++ b/scripts/ci/docker-compose/local.yml @@ -45,7 +45,6 @@ services: - ../../../dags:/opt/airflow/dags:cached - ../../../dev:/opt/airflow/dev:cached - ../../../docs:/opt/airflow/docs:cached - - ../../../files:/files:cached - ../../../dist:/dist:cached - ../../../hooks:/opt/airflow/hooks:cached - ../../../logs:/root/airflow/logs:cached @@ -60,12 +59,5 @@ services: - ../../../tmp:/tmp:cached - ../../../metastore_browser:/opt/airflow/metastore_browser:cached # END automatically generated volumes from LOCAL_MOUNTS in _local_mounts.sh - environment: - - HOST_USER_ID - - HOST_GROUP_ID - - HOST_HOME=${HOME} - - HOST_AIRFLOW_SOURCES=${AIRFLOW_SOURCES} - - HOST_OS - - PYTHONDONTWRITEBYTECODE ports: - "${WEBSERVER_HOST_PORT}:8080" diff --git a/scripts/ci/docs/ci_docs.sh b/scripts/ci/docs/ci_docs.sh index 847cb49f1a1b8..605ce85eb7b92 100755 
--- a/scripts/ci/docs/ci_docs.sh +++ b/scripts/ci/docs/ci_docs.sh @@ -23,17 +23,8 @@ export PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:-3.6} function run_docs() { docker run "${EXTRA_DOCKER_FLAGS[@]}" -t \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ - "--" "/opt/airflow/docs/build" \ + "--" "/opt/airflow/scripts/ci/in_container/run_docs_build.sh" \ | tee -a "${OUTPUT_LOG}" } diff --git a/scripts/ci/in_container/_in_container_utils.sh b/scripts/ci/in_container/_in_container_utils.sh index 18af0b2e93003..56d0fc9326ecf 100644 --- a/scripts/ci/in_container/_in_container_utils.sh +++ b/scripts/ci/in_container/_in_container_utils.sh @@ -248,31 +248,18 @@ function setup_kerberos() { } function dump_airflow_logs() { + DUMP_FILE=/files/airflow_logs_$(date "+%Y-%m-%d")_${CI_BUILD_ID:="default"}_${CI_JOB_ID:="default"}.log.tar.gz echo "###########################################################################################" echo " Dumping logs from all the airflow tasks" echo "###########################################################################################" - pushd /root/airflow/ || exit 1 - tar -czf "${1}" logs + pushd "${AIRFLOW_HOME}" || exit 1 + tar -czf "${DUMP_FILE}" logs + echo " Logs dumped to ${DUMP_FILE}" popd || exit 1 echo "###########################################################################################" } -function send_airflow_logs_to_file_io() { - echo "##############################################################################" - echo - echo " DUMPING LOG FILES FROM AIRFLOW AND SENDING THEM TO file.io" - echo - echo "##############################################################################" - DUMP_FILE=/tmp/$(date 
"+%Y-%m-%d")_airflow_${CI_BUILD_ID:="default"}_${CI_JOB_ID:="default"}.log.tar.gz - dump_airflow_logs "${DUMP_FILE}" - echo - echo " Logs saved to ${DUMP_FILE}" - echo - echo "##############################################################################" - curl -F "file=@${DUMP_FILE}" https://file.io -} - function install_released_airflow_version() { pip uninstall -y apache-airflow || true find /root/airflow/ -type f -print0 | xargs -0 rm -f -- diff --git a/scripts/ci/in_container/entrypoint_ci.sh b/scripts/ci/in_container/entrypoint_ci.sh index d006eaa6652b4..a73b9d861e7da 100755 --- a/scripts/ci/in_container/entrypoint_ci.sh +++ b/scripts/ci/in_container/entrypoint_ci.sh @@ -161,19 +161,21 @@ if [[ "${RUN_TESTS}" != "true" ]]; then fi set -u +export RESULT_LOG_FILE="/files/test_result.xml" + if [[ "${CI}" == "true" ]]; then EXTRA_PYTEST_ARGS=( "--verbosity=0" "--strict-markers" - "--instafail" "--durations=100" "--cov=airflow/" "--cov-config=.coveragerc" - "--cov-report=html:airflow/www/static/coverage/" + "--cov-report=xml:/files/coverage.xml" "--color=yes" "--maxfail=50" "--pythonwarnings=ignore::DeprecationWarning" "--pythonwarnings=ignore::PendingDeprecationWarning" + "--junitxml=${RESULT_LOG_FILE}" ) else EXTRA_PYTEST_ARGS=() @@ -187,25 +189,43 @@ if [[ ${#@} -gt 0 && -n "$1" ]]; then fi if [[ -n ${RUN_INTEGRATION_TESTS:=""} ]]; then + # Integration tests for INT in ${RUN_INTEGRATION_TESTS} do EXTRA_PYTEST_ARGS+=("--integration" "${INT}") done - EXTRA_PYTEST_ARGS+=("-rpfExX") + EXTRA_PYTEST_ARGS+=( + # timeouts in seconds for individual tests + "--setup-timeout=20" + "--execution-timeout=60" + "--teardown-timeout=20" + ) + elif [[ ${ONLY_RUN_LONG_RUNNING_TESTS:=""} == "true" ]]; then EXTRA_PYTEST_ARGS+=( "-m" "long_running" "--include-long-running" "--verbosity=1" - "--reruns" "3" - "--timeout" "90") + "--setup-timeout=30" + "--execution-timeout=120" + "--teardown-timeout=30" + ) elif [[ ${ONLY_RUN_QUARANTINED_TESTS:=""} == "true" ]]; then 
EXTRA_PYTEST_ARGS+=( "-m" "quarantined" "--include-quarantined" "--verbosity=1" - "--reruns" "3" - "--timeout" "90") + "--setup-timeout=10" + "--execution-timeout=50" + "--teardown-timeout=10" + ) +else + # Core tests + EXTRA_PYTEST_ARGS+=( + "--setup-timeout=10" + "--execution-timeout=30" + "--teardown-timeout=10" + ) fi ARGS=("${EXTRA_PYTEST_ARGS[@]}" "${TESTS_TO_RUN[@]}") diff --git a/scripts/ci/in_container/quarantine_issue_header.md b/scripts/ci/in_container/quarantine_issue_header.md new file mode 100644 index 0000000000000..d672a4de7bf44 --- /dev/null +++ b/scripts/ci/in_container/quarantine_issue_header.md @@ -0,0 +1,32 @@ + + +# Quarantined issues + +Please do not update status or list of the issues manually. It is automatically updated during +Quarantine workflow, when the workflow executes in the context of Apache Airflow repository. +This happens on schedule (4 times a day) or when a change has been merged or pushed +to the relevant branch. + +You can update "Comment" column in the issue list - the update process will read and preserve this column. + +# Status update +Last status update (UTC): {{ DATE_UTC_NOW }} + +# List of Quarantined issues diff --git a/scripts/ci/in_container/run_ci_tests.sh b/scripts/ci/in_container/run_ci_tests.sh index dc86cf0bc0c47..f143ab7a03f76 100755 --- a/scripts/ci/in_container/run_ci_tests.sh +++ b/scripts/ci/in_container/run_ci_tests.sh @@ -30,11 +30,36 @@ RES=$? set +x if [[ "${RES}" == "0" && ${CI:="false"} == "true" ]]; then echo "All tests successful" - bash <(curl -s https://codecov.io/bash) + cp .coverage /files +fi + +MAIN_GITHUB_REPOSITORY="apache/airflow" + +if [[ ${ONLY_RUN_QUARANTINED_TESTS:=} = "true" ]]; then + if [[ ${GITHUB_REPOSITORY} == "${MAIN_GITHUB_REPOSITORY}" ]]; then + if [[ ${RES} == "1" || ${RES} == "0" ]]; then + echo + echo "Pytest exited with ${RES} result. Updating Quarantine Issue!" 
+ echo + "${IN_CONTAINER_DIR}/update_quarantined_test_status.py" "${RESULT_LOG_FILE}" + else + echo + echo "Pytest exited with ${RES} result. NOT Updating Quarantine Issue!" + echo + fi + else + echo + echo "Github repository '${GITHUB_REPOSITORY}'. NOT Updating Quarantine Issue!" + echo + fi +else + echo + echo "Regular tests. NOT Updating Quarantine Issue!" + echo fi if [[ ${CI:=} == "true" ]]; then - send_airflow_logs_to_file_io + dump_airflow_logs fi exit "${RES}" diff --git a/scripts/ci/in_container/run_docs_build.sh b/scripts/ci/in_container/run_docs_build.sh index 8c632f879c7de..6e1b022fb9968 100755 --- a/scripts/ci/in_container/run_docs_build.sh +++ b/scripts/ci/in_container/run_docs_build.sh @@ -23,6 +23,12 @@ HANDLERS="$( trap -p EXIT | cut -f2 -d \' )" # shellcheck disable=SC2064 trap "${HANDLERS}${HANDLERS:+;}in_container_fix_ownership" EXIT -sudo rm -rf "$(pwd)/docs/_build/*" -sudo rm -rf "$(pwd)/docs/_api/*" -sudo -E "$(pwd)/docs/build" +sudo rm -rf "${AIRFLOW_SOURCES}/docs/_build/*" +sudo rm -rf "${AIRFLOW_SOURCES}/docs/_api/*" + +sudo -E "${AIRFLOW_SOURCES}/docs/build" + +if [[ ${CI} == "true" ]]; then + rm -rf "/files/documentation" + cp -r "${AIRFLOW_SOURCES}/docs/_build/html" "/files/documentation" +fi diff --git a/scripts/ci/in_container/run_prepare_backport_packages.sh b/scripts/ci/in_container/run_prepare_backport_packages.sh index c74fb1637cc9c..a491c61f4bf3d 100755 --- a/scripts/ci/in_container/run_prepare_backport_packages.sh +++ b/scripts/ci/in_container/run_prepare_backport_packages.sh @@ -170,16 +170,9 @@ fi popd -AIRFLOW_PACKAGES_TGZ_FILE="/tmp/airflow-packages-$(date +"%Y%m%d-%H%M%S")-${VERSION_SUFFIX_FOR_SVN}${VERSION_SUFFIX_FOR_PYPI}.tar.gz" +AIRFLOW_PACKAGES_TGZ_FILE="/files/airflow-packages-$(date +"%Y%m%d-%H%M%S")-${VERSION_SUFFIX_FOR_SVN}${VERSION_SUFFIX_FOR_PYPI}.tar.gz" tar -cvzf "${AIRFLOW_PACKAGES_TGZ_FILE}" dist/*.whl dist/*.tar.gz echo echo "Airflow packages are in dist folder and tar-gzipped in 
${AIRFLOW_PACKAGES_TGZ_FILE}" echo -if [[ "${CI:=false}" == "true" ]]; then - echo - echo "Sending all airflow packages to file.io" - echo - curl -F "file=@${AIRFLOW_PACKAGES_TGZ_FILE}" https://file.io - echo -fi diff --git a/scripts/ci/in_container/run_prepare_backport_readme.sh b/scripts/ci/in_container/run_prepare_backport_readme.sh index 99c818302f883..7af356519ec5d 100755 --- a/scripts/ci/in_container/run_prepare_backport_readme.sh +++ b/scripts/ci/in_container/run_prepare_backport_readme.sh @@ -41,7 +41,7 @@ cd "${AIRFLOW_SOURCES}/backport_packages" || exit 1 python3 setup_backport_packages.py update-package-release-notes "$@" -AIRFLOW_BACKPORT_README_TGZ_FILE="/dist/airflow-backport-readme-$(date +"%Y-%m-%d-%H.%M.%S").tar.gz" +AIRFLOW_BACKPORT_README_TGZ_FILE="/files/airflow-backport-readme-$(date +"%Y-%m-%d-%H.%M.%S").tar.gz" cd "${AIRFLOW_SOURCES}" || exit 1 @@ -50,10 +50,3 @@ find airflow/providers \( -name 'README.md' -o -name 'PROVIDERS_CHANGES*' \) -pr echo echo "Airflow readme for backport packages are tar-gzipped in ${AIRFLOW_BACKPORT_README_TGZ_FILE}" echo -if [[ "${CI:=false}" == "true" ]]; then - echo - echo "Sending all airflow packages to file.io" - echo - curl -F "file=@${AIRFLOW_PACKAGES_TGZ_FILE}" https://file.io - echo -fi diff --git a/scripts/ci/in_container/run_pylint_main.sh b/scripts/ci/in_container/run_pylint.sh similarity index 96% rename from scripts/ci/in_container/run_pylint_main.sh rename to scripts/ci/in_container/run_pylint.sh index 3f2510099287b..222080207dcb7 100755 --- a/scripts/ci/in_container/run_pylint_main.sh +++ b/scripts/ci/in_container/run_pylint.sh @@ -38,8 +38,6 @@ if [[ ${#@} == "0" ]]; then -path "./.eggs" -prune -o \ -path "./docs/_build" -prune -o \ -path "./build" -prune -o \ - -path "./tests" -prune -o \ - -path "./kubernetes_tests" -prune -o \ -name "*.py" \ -not -name 'webserver_config.py' | \ grep ".*.py$" | \ diff --git a/scripts/ci/in_container/run_pylint_tests.sh 
b/scripts/ci/in_container/run_pylint_tests.sh deleted file mode 100755 index c434137a27134..0000000000000 --- a/scripts/ci/in_container/run_pylint_tests.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# shellcheck source=scripts/ci/in_container/_in_container_script_init.sh -. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" - -export PYTHONPATH=${AIRFLOW_SOURCES} - -set +e - -if [[ ${#@} == "0" ]]; then - echo - echo "Running pylint for 'tests' and 'kubernetes_tests' folder" - echo - find "./tests" "./kubernetes_tests" -name "*.py" | \ - grep -vFf scripts/ci/pylint_todo.txt | \ - # running pylint using built-in parallel functionality might speed it up - xargs pylint -j 0 --disable="${DISABLE_CHECKS_FOR_TESTS}" --output-format=colorized | sort -u - RES=$? -else - # running pylint using built-in parallel functionality might speed it up - pylint -j 0 --disable="${DISABLE_CHECKS_FOR_TESTS}" --output-format=colorized "$@" | sort -u - RES=$? -fi - -set -e - -if [[ "${RES}" != 0 ]]; then - echo >&2 - echo >&2 "There were some pylint errors. 
Exiting" - echo >&2 - exit 1 -else - echo - echo "Pylint check succeeded" - echo -fi diff --git a/scripts/ci/in_container/run_system_tests.sh b/scripts/ci/in_container/run_system_tests.sh index 8cb3c3e440b36..e3d7c7fd9aa7b 100755 --- a/scripts/ci/in_container/run_system_tests.sh +++ b/scripts/ci/in_container/run_system_tests.sh @@ -47,7 +47,7 @@ if [[ "${RES}" == "0" && ${CI} == "true" ]]; then fi if [[ ${CI} == "true" ]]; then - send_airflow_logs_to_file_io + dump_airflow_logs fi in_container_script_end diff --git a/scripts/ci/in_container/update_quarantined_test_status.py b/scripts/ci/in_container/update_quarantined_test_status.py new file mode 100755 index 0000000000000..df35d8edf0ac4 --- /dev/null +++ b/scripts/ci/in_container/update_quarantined_test_status.py @@ -0,0 +1,243 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import os +import re +import sys +from datetime import datetime +from os.path import dirname, join, realpath +from typing import Dict, List, NamedTuple, Optional +from urllib.parse import urlsplit + +import jinja2 +from bs4 import BeautifulSoup +from github3 import login +from jinja2 import StrictUndefined +from tabulate import tabulate + + +class TestResult(NamedTuple): + test_id: str + file: str + name: str + classname: str + line: str + result: bool + + +class TestHistory(NamedTuple): + test_id: str + name: str + url: str + states: List[bool] + comment: str + + +test_results = [] + +user = "" +repo = "" +issue_id = 0 +num_runs = 10 + +url_pattern = re.compile(r'\[([^]]*)]\(([^)]*)\)') + +status_map: Dict[str, bool] = { + ":heavy_check_mark:": True, + ":x:": False, +} + +reverse_status_map: Dict[bool, str] = {status_map[key]: key for key in status_map.keys()} + + +def get_url(result: TestResult) -> str: + return f"[{result.name}](https://github.com/{user}/{repo}/blob/" \ + f"master/{result.file}?test_id={result.test_id}#L{result.line})" + + +def parse_state_history(history_string: str) -> List[bool]: + history_array = history_string.split(' ') + status_array: List[bool] = [] + for value in history_array: + if value: + status_array.append(status_map[value]) + return status_array + + +def parse_test_history(line: str) -> Optional[TestHistory]: + values = line.split("|") + match_url = url_pattern.match(values[1].strip()) + if match_url: + name = match_url.group(1) + url = match_url.group(0) + http_url = match_url.group(2) + parsed_url = urlsplit(http_url) + the_id = parsed_url[3].split("=")[1] + comment = values[5] if len(values) >= 6 else "" + # noinspection PyBroadException + try: + states = parse_state_history(values[3]) + except Exception: + states = [] + return TestHistory( + test_id=the_id, + name=name, + states=states, + url=url, + comment=comment, + ) + return None + + +def parse_body(body: str) -> Dict[str, TestHistory]: + parse = False + 
test_history_map: Dict[str, TestHistory] = {} + for line in body.splitlines(keepends=False): + if line.startswith("|-"): + parse = True + continue + if parse: + if not line.startswith("|"): + break + # noinspection PyBroadException + try: + status = parse_test_history(line) + except Exception: + continue + if status: + test_history_map[status.test_id] = status + return test_history_map + + +def update_test_history(history: TestHistory, last_status: bool): + print(f"Adding status to test history: {history}, {last_status}") + return TestHistory( + test_id=history.test_id, + name=history.name, + url=history.url, + states=([last_status] + history.states)[0:num_runs], + comment=history.comment, + ) + + +def create_test_history(result: TestResult) -> TestHistory: + print(f"Creating test history {result}") + return TestHistory( + test_id=result.test_id, + name=result.name, + url=get_url(result), + states=[result.result], + comment="" + ) + + +def get_history_status(history: TestHistory): + if len(history.states) < num_runs: + if all(history.states): + return "So far, so good" + return "Flaky" + if all(history.states): + return "Stable" + if all(history.states[0:num_runs - 1]): + return "Just one more" + if all(history.states[0:int(num_runs / 2)]): + return "Almost there" + return "Flaky" + + +def get_table(history_map: Dict[str, TestHistory]) -> str: + headers = ["Test", "Last run", f"Last {num_runs} runs", "Status", "Comment"] + the_table: List[List[str]] = [] + for ordered_key in sorted(history_map.keys()): + history = history_map[ordered_key] + the_table.append([ + history.url, + "Succeeded" if history.states[0] else "Failed", + " ".join([reverse_status_map[state] for state in history.states]), + get_history_status(history), + history.comment + ]) + return tabulate(the_table, headers, tablefmt="github") + + +if __name__ == '__main__': + if len(sys.argv) < 2: + print("Provide XML JUNIT FILE as first argument") + sys.exit(1) + + with open(sys.argv[1], "r") as f: + text = 
f.read() + y = BeautifulSoup(text, "html.parser") + res = y.testsuites.testsuite.findAll("testcase") + for test in res: + print("Parsing: " + test['classname'] + "::" + test['name']) + if len(test.contents) > 0 and test.contents[0].name == 'skipped': + print(f"skipping {test['name']}") + continue + test_results.append(TestResult( + test_id=test['classname'] + "::" + test['name'], + file=test['file'], + line=test['line'], + name=test['name'], + classname=test['classname'], + result=len(test.contents) == 0 + )) + + token = os.environ.get("GITHUB_TOKEN") + print(f"Token: {'<set>' if token else '<not set>'}") + github_repository = os.environ.get('GITHUB_REPOSITORY') + if not github_repository: + raise Exception("Github Repository must be defined!") + user, repo = github_repository.split("/") + print(f"User: {user}, Repo: {repo}") + issue_id = int(os.environ.get('ISSUE_ID', 0)) + num_runs = int(os.environ.get('NUM_RUNS', 10)) + + if issue_id == 0: + raise Exception("You need to define ISSUE_ID as environment variable") + + gh = login(token=token) + + quarantined_issue = gh.issue(user, repo, issue_id) + print("-----") + print(quarantined_issue.body) + print("-----") + parsed_test_map = parse_body(quarantined_issue.body) + new_test_map: Dict[str, TestHistory] = {} + + for test_result in test_results: + previous_results = parsed_test_map.get(test_result.test_id) + if previous_results: + updated_results = update_test_history( + previous_results, test_result.result) + new_test_map[previous_results.test_id] = updated_results + else: + new_history = create_test_history(test_result) + new_test_map[new_history.test_id] = new_history + table = get_table(new_test_map) + print() + print("Result:") + print() + print(table) + print() + with open(join(dirname(realpath(__file__)), "quarantine_issue_header.md"), "r") as f: + header = jinja2.Template(f.read(), autoescape=True, undefined=StrictUndefined).\ + render(DATE_UTC_NOW=datetime.utcnow()) + quarantined_issue.edit(title=None, + body=header + "\n\n" + 
str(table), + state='open' if len(test_results) > 0 else 'closed') diff --git a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh index bb5a31e069535..5e6c04d354733 100755 --- a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh +++ b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh @@ -61,7 +61,7 @@ else "--durations=100" "--cov=airflow/" "--cov-config=.coveragerc" - "--cov-report=html:airflow/www/static/coverage/" + "--cov-report=xml:files/coverage.xml" "--color=yes" "--maxfail=50" "--pythonwarnings=ignore::DeprecationWarning" diff --git a/scripts/ci/libraries/_docker.env b/scripts/ci/libraries/_docker.env new file mode 100644 index 0000000000000..99f3be413c451 --- /dev/null +++ b/scripts/ci/libraries/_docker.env @@ -0,0 +1,29 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+INSTALL_AIRFLOW_VERSION +PYTHONDONTWRITEBYTECODE +VERBOSE +VERBOSE_COMMANDS +HOST_USER_ID +HOST_GROUP_ID +HOST_OS +HOST_HOME +HOST_AIRFLOW_SOURCES +PYTHON_MAJOR_MINOR_VERSION +VERSION_SUFFIX_FOR_PYPI +VERSION_SUFFIX_FOR_SVN +CI diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh index 49afef0f624fd..ce4886806891c 100644 --- a/scripts/ci/libraries/_initialization.sh +++ b/scripts/ci/libraries/_initialization.sh @@ -21,10 +21,6 @@ function initialize_common_environment { # default python Major/Minor version PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:="3.6"} - # extra flags passed to docker run for CI image - # shellcheck disable=SC2034 - EXTRA_DOCKER_FLAGS=() - # extra flags passed to docker run for PROD image # shellcheck disable=SC2034 EXTRA_DOCKER_PROD_BUILD_FLAGS=() @@ -122,15 +118,21 @@ function initialize_common_environment { HOST_OS="$(uname -s)" export HOST_OS + # Home directory of the host user + HOST_HOME="${HOME}" + export HOST_HOME + + # Sources of Airflow on the host. 
+ HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" + export HOST_AIRFLOW_SOURCES # Add the right volume mount for sources, depending which mount strategy is used if [[ ${MOUNT_SOURCE_DIR_FOR_STATIC_CHECKS} == "true" ]]; then print_info print_info "Mount whole airflow source directory for static checks" print_info - EXTRA_DOCKER_FLAGS=( \ - "-v" "${AIRFLOW_SOURCES}:/opt/airflow" \ - "--env" "PYTHONDONTWRITEBYTECODE" \ + EXTRA_DOCKER_FLAGS=( + "-v" "${AIRFLOW_SOURCES}:/opt/airflow" ) elif [[ ${MOUNT_HOST_AIRFLOW_VOLUME} == "true" ]]; then print_info @@ -138,24 +140,21 @@ function initialize_common_environment { print_info read -r -a EXTRA_DOCKER_FLAGS <<< "$(convert_local_mounts_to_docker_params)" + EXTRA_DOCKER_FLAGS+=( + "-v" "${AIRFLOW_SOURCES}/files:/files" + ) else print_info print_info "Skip mounting host volumes to Docker" print_info - EXTRA_DOCKER_FLAGS=( \ - "--env" "PYTHONDONTWRITEBYTECODE" \ - ) + EXTRA_DOCKER_FLAGS=() fi - # In case of the CI build get environment variables from codecov.io and - # set it as the extra docker flags. 
As described in https://docs.codecov.io/docs/testing-with-docker - if [[ ${CI:=} == "true" ]]; then - CI_CODECOV_ENV="$(bash <(curl -s https://codecov.io/env))" - for ENV_PARAM in ${CI_CODECOV_ENV} - do - EXTRA_DOCKER_FLAGS+=("${ENV_PARAM}") - done - fi + EXTRA_DOCKER_FLAGS+=( + "--rm" + "--env-file" "${AIRFLOW_SOURCES}/scripts/ci/libraries/_docker.env" + ) + export EXTRA_DOCKER_FLAGS # By default we are not upgrading to latest version of constraints when building Docker CI image # This will only be done in cron jobs @@ -287,9 +286,17 @@ function get_environment_for_builds_on_ci() { else export CI_EVENT_TYPE="push" fi + elif [[ "${LOCAL_CI_TESTING:=}" == "true" ]]; then + export CI_TARGET_REPO="apache/airflow" + export CI_TARGET_BRANCH="${DEFAULT_BRANCH:="master"}" + export CI_BUILD_ID="0" + export CI_JOB_ID="0" + export CI_EVENT_TYPE="pull_request" + export CI_SOURCE_REPO="apache/airflow" + export CI_SOURCE_BRANCH="${DEFAULT_BRANCH:="master"}" else echo - echo "ERROR! Unknown CI environment. Exiting" + echo "ERROR! Unknown CI environment. You can set LOCAL_CI_TESTING=\"true\" to run it locally." 
exit 1 fi fi diff --git a/scripts/ci/libraries/_local_mounts.sh b/scripts/ci/libraries/_local_mounts.sh index 78634d8946528..23c7bb72e8ced 100644 --- a/scripts/ci/libraries/_local_mounts.sh +++ b/scripts/ci/libraries/_local_mounts.sh @@ -41,7 +41,6 @@ function generate_local_mounts_list { "$prefix"dags:/opt/airflow/dags:cached "$prefix"dev:/opt/airflow/dev:cached "$prefix"docs:/opt/airflow/docs:cached - "$prefix"files:/files:cached "$prefix"dist:/dist:cached "$prefix"hooks:/opt/airflow/hooks:cached "$prefix"logs:/root/airflow/logs:cached diff --git a/scripts/ci/libraries/_runs.sh b/scripts/ci/libraries/_runs.sh index a6795a559abfb..9608d57e3758d 100644 --- a/scripts/ci/libraries/_runs.sh +++ b/scripts/ci/libraries/_runs.sh @@ -20,15 +20,6 @@ function run_docs() { verbose_docker run "${EXTRA_DOCKER_FLAGS[@]}" -t \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/docs/build" \ | tee -a "${OUTPUT_LOG}" @@ -38,16 +29,6 @@ function run_docs() { function run_generate_constraints() { docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --env PYTHON_MAJOR_MINOR_VERSION \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/ci/in_container/run_generate_constraints.sh" \ | tee -a "${OUTPUT_LOG}" @@ -57,20 +38,8 @@ function run_generate_constraints() { function run_prepare_backport_packages() { docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ - 
--env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --env PYTHON_MAJOR_MINOR_VERSION \ - --env VERSION_SUFFIX_FOR_PYPI \ - --env VERSION_SUFFIX_FOR_SVN \ -t \ -v "${AIRFLOW_SOURCES}:/opt/airflow" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/ci/in_container/run_prepare_backport_packages.sh" "${@}" \ | tee -a "${OUTPUT_LOG}" @@ -80,18 +49,8 @@ function run_prepare_backport_packages() { function run_prepare_backport_readme() { docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --env PYTHON_MAJOR_MINOR_VERSION \ -t \ -v "${AIRFLOW_SOURCES}:/opt/airflow" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/ci/in_container/run_prepare_backport_readme.sh" "${@}" \ | tee -a "${OUTPUT_LOG}" diff --git a/scripts/ci/openapi/client_codegen_diff.sh b/scripts/ci/openapi/client_codegen_diff.sh index 9a8511b70534f..688a06f9034ea 100755 --- a/scripts/ci/openapi/client_codegen_diff.sh +++ b/scripts/ci/openapi/client_codegen_diff.sh @@ -48,4 +48,4 @@ mkdir -p ./clients/go_target_branch/airflow git reset --hard "${TARGET_REMOTE}/${CI_TARGET_BRANCH}" ./clients/gen/go.sh ./airflow/api_connexion/openapi/v1.yaml ./clients/go_target_branch/airflow -diff ./clients/go_target_branch/airflow ./clients/go/airflow || true +diff -u ./clients/go_target_branch/airflow ./clients/go/airflow || true diff --git a/scripts/ci/pre_commit/pre_commit_pylint_main.sh b/scripts/ci/pre_commit/pre_commit_pylint.sh similarity index 86% rename from 
scripts/ci/pre_commit/pre_commit_pylint_main.sh rename to scripts/ci/pre_commit/pre_commit_pylint.sh index 39c9d785eda87..764617812eda1 100755 --- a/scripts/ci/pre_commit/pre_commit_pylint_main.sh +++ b/scripts/ci/pre_commit/pre_commit_pylint.sh @@ -18,5 +18,5 @@ export FORCE_ANSWER_TO_QUESTIONS=${FORCE_ANSWER_TO_QUESTIONS:="quit"} export REMEMBER_LAST_ANSWER="true" -# shellcheck source=scripts/ci/static_checks/ci_pylint_main.sh -. "$( dirname "${BASH_SOURCE[0]}" )/../static_checks/ci_pylint_main.sh" "${@}" +# shellcheck source=scripts/ci/static_checks/ci_pylint.sh +. "$( dirname "${BASH_SOURCE[0]}" )/../static_checks/ci_pylint.sh" "${@}" diff --git a/scripts/ci/speccy_rules/connexion.yml b/scripts/ci/speccy_rules/connexion.yml new file mode 100644 index 0000000000000..4a0c057d50ead --- /dev/null +++ b/scripts/ci/speccy_rules/connexion.yml @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+--- +rules: + - name: operation-x-openapi-router-controller + object: operation + description: operation should have a x-openapi-router-controller attribute + truthy: x-openapi-router-controller diff --git a/scripts/ci/static_checks/ci_check_license.sh b/scripts/ci/static_checks/ci_check_license.sh index 3d887c4219f48..5840ab407498e 100755 --- a/scripts/ci/static_checks/ci_check_license.sh +++ b/scripts/ci/static_checks/ci_check_license.sh @@ -34,7 +34,6 @@ function run_check_license() { if ! docker run "${EXTRA_DOCKER_FLAGS[@]}" -t \ --user "$(id -ur):$(id -gr)" \ - --rm \ ashb/apache-rat:0.13-1 \ --exclude-file /opt/airflow/.rat-excludes \ --d /opt/airflow | tee "${AIRFLOW_SOURCES}/logs/rat-results.txt" ; then diff --git a/scripts/ci/static_checks/ci_flake8.sh b/scripts/ci/static_checks/ci_flake8.sh index 4ebd060328899..e2b437d4bae8d 100755 --- a/scripts/ci/static_checks/ci_flake8.sh +++ b/scripts/ci/static_checks/ci_flake8.sh @@ -26,30 +26,12 @@ function run_flake8() { if [[ "${#FILES[@]}" == "0" ]]; then docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/ci/in_container/run_flake8.sh" \ | tee -a "${OUTPUT_LOG}" else docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/ci/in_container/run_flake8.sh" "${FILES[@]}" \ | tee -a "${OUTPUT_LOG}" diff --git 
a/scripts/ci/static_checks/ci_mypy.sh b/scripts/ci/static_checks/ci_mypy.sh index 8d6aeba19cd32..45072cbce1e7f 100755 --- a/scripts/ci/static_checks/ci_mypy.sh +++ b/scripts/ci/static_checks/ci_mypy.sh @@ -28,16 +28,7 @@ function run_mypy() { docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ "-v" "${AIRFLOW_SOURCES}/.mypy_cache:/opt/airflow/.mypy_cache" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/ci/in_container/run_mypy.sh" "${FILES[@]}" \ | tee -a "${OUTPUT_LOG}" diff --git a/scripts/ci/static_checks/ci_pylint_main.sh b/scripts/ci/static_checks/ci_pylint.sh similarity index 68% rename from scripts/ci/static_checks/ci_pylint_main.sh rename to scripts/ci/static_checks/ci_pylint.sh index 09083b6e9e92a..72f8d5995248d 100755 --- a/scripts/ci/static_checks/ci_pylint_main.sh +++ b/scripts/ci/static_checks/ci_pylint.sh @@ -20,37 +20,19 @@ export PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:-3.6} # shellcheck source=scripts/ci/libraries/_script_init.sh . 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" -function run_pylint_main() { +function run_pylint() { FILES=("$@") if [[ "${#FILES[@]}" == "0" ]]; then docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ - "--" "/opt/airflow/scripts/ci/in_container/run_pylint_main.sh" \ + "--" "/opt/airflow/scripts/ci/in_container/run_pylint.sh" \ | tee -a "${OUTPUT_LOG}" else docker run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ - "--" "/opt/airflow/scripts/ci/in_container/run_pylint_main.sh" "${FILES[@]}" \ + "--" "/opt/airflow/scripts/ci/in_container/run_pylint.sh" "${FILES[@]}" \ | tee -a "${OUTPUT_LOG}" fi } @@ -67,8 +49,8 @@ if [[ "${#@}" != "0" ]]; then if [[ "${#FILTERED_FILES[@]}" == "0" ]]; then echo "Filtered out all files. Skipping pylint." else - run_pylint_main "${FILTERED_FILES[@]}" + run_pylint "${FILTERED_FILES[@]}" fi else - run_pylint_main + run_pylint fi diff --git a/scripts/ci/static_checks/ci_pylint_tests.sh b/scripts/ci/static_checks/ci_pylint_tests.sh deleted file mode 100755 index 191af0459030b..0000000000000 --- a/scripts/ci/static_checks/ci_pylint_tests.sh +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -export PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:-3.6} - -# shellcheck source=scripts/ci/libraries/_script_init.sh -. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" - -function run_pylint_tests() { - FILES=("$@") - if [[ "${#FILES[@]}" == "0" ]]; then - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ - --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ - "${AIRFLOW_CI_IMAGE}" \ - "--" "/opt/airflow/scripts/ci/in_container/run_pylint_tests.sh" \ - | tee -a "${OUTPUT_LOG}" - else - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ - --entrypoint "/usr/local/bin/dumb-init" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ - "${AIRFLOW_CI_IMAGE}" \ - "--" "/opt/airflow/scripts/ci/in_container/run_pylint_tests.sh" "${FILES[@]}" \ - | tee -a "${OUTPUT_LOG}" - fi -} - 
-get_environment_for_builds_on_ci - -prepare_ci_build - -rebuild_ci_image_if_needed - -if [[ "${#@}" != "0" ]]; then - filter_out_files_from_pylint_todo_list "$@" - - if [[ "${#FILTERED_FILES[@]}" == "0" ]]; then - echo "Filtered out all files. Skipping pylint." - else - run_pylint_tests "${FILTERED_FILES[@]}" - fi -else - run_pylint_tests -fi diff --git a/scripts/ci/static_checks/ci_refresh_pylint_todo.sh b/scripts/ci/static_checks/ci_refresh_pylint_todo.sh index 202f6b345fd9c..79c196ac81618 100755 --- a/scripts/ci/static_checks/ci_refresh_pylint_todo.sh +++ b/scripts/ci/static_checks/ci_refresh_pylint_todo.sh @@ -22,15 +22,6 @@ export FORCE_ANSWER_TO_QUESTIONS=quit function refresh_pylint_todo() { docker run "${EXTRA_DOCKER_FLAGS[@]}" \ - --env PYTHONDONTWRITEBYTECODE \ - --env VERBOSE \ - --env VERBOSE_COMMANDS \ - --env HOST_USER_ID="$(id -ur)" \ - --env HOST_GROUP_ID="$(id -gr)" \ - --env HOST_OS="$(uname -s)" \ - --env HOST_HOME="${HOME}" \ - --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \ - --rm \ "${AIRFLOW_CI_IMAGE}" \ /opt/airflow/scripts/ci/in_container/refresh_pylint_todo.sh \ | tee -a "${OUTPUT_LOG}" diff --git a/scripts/ci/testing/ci_run_airflow_testing.sh b/scripts/ci/testing/ci_run_airflow_testing.sh index 884b164f55907..0e7393be86949 100755 --- a/scripts/ci/testing/ci_run_airflow_testing.sh +++ b/scripts/ci/testing/ci_run_airflow_testing.sh @@ -30,14 +30,50 @@ fi function run_airflow_testing_in_docker() { set +u - # shellcheck disable=SC2016 - docker-compose --log-level INFO \ - -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ - -f "${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml" \ - "${INTEGRATIONS[@]}" \ - "${DOCKER_COMPOSE_LOCAL[@]}" \ - run airflow "${@}" + set +e + for TRY_NUM in {1..3} + do + echo + echo "Starting try number ${TRY_NUM}" + echo + docker-compose --log-level INFO \ + -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ + -f "${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml" \ + "${INTEGRATIONS[@]}" \ + 
"${DOCKER_COMPOSE_LOCAL[@]}" \ + run airflow "${@}" + EXIT_CODE=$? + if [[ ${EXIT_CODE} == 254 ]]; then + echo + echo "Failed starting integration on ${TRY_NUM} try. Wiping-out docker-compose remnants" + echo + docker-compose --log-level INFO \ + -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ + down --remove-orphans -v --timeout 5 + echo + echo "Sleeping 5 seconds" + echo + sleep 5 + continue + else + break + fi + done + if [[ ${ONLY_RUN_QUARANTINED_TESTS:=} == "true" ]]; then + if [[ ${EXIT_CODE} == "1" ]]; then + echo + echo "Some Quarantined tests failed. but we recorded it in an issue" + echo + EXIT_CODE="0" + else + echo + echo "All Quarantined tests succeeded" + echo + fi + fi set -u + set -e + return "${EXIT_CODE}" } get_environment_for_builds_on_ci @@ -52,10 +88,13 @@ export BACKEND=${BACKEND:="sqlite"} # Whether necessary for airflow run local sources are mounted to docker export MOUNT_LOCAL_SOURCES=${MOUNT_LOCAL_SOURCES:="false"} -# whethere verbose output should be produced +# Whether files folder is mounted to docker +export MOUNT_FILES=${MOUNT_FILES:="true"} + +# whether verbose output should be produced export VERBOSE=${VERBOSE:="false"} -# whethere verbose commadns output (set-x) should be used +# whether verbose commands output (set -x) should be used export VERBOSE_COMMANDS=${VERBOSE_COMMANDS:="false"} # Forwards host credentials to the container @@ -64,10 +103,18 @@ export FORWARD_CREDENTIALS=${FORWARD_CREDENTIALS:="false"} # Installs different airflow version than current from the sources export INSTALL_AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION:=""} +DOCKER_COMPOSE_LOCAL=() + if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then - DOCKER_COMPOSE_LOCAL=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml") -else - DOCKER_COMPOSE_LOCAL=() + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml") +fi + +if [[ ${MOUNT_FILES} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml") +fi + +if [[ ${CI} == 
"true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/ci.yml") fi if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then @@ -93,6 +140,7 @@ elif [[ ${TEST_TYPE:=} == "Long" ]]; then export ONLY_RUN_LONG_RUNNING_TESTS="true" elif [[ ${TEST_TYPE:=} == "Quarantined" ]]; then export ONLY_RUN_QUARANTINED_TESTS="true" + # Do not fail in quarantined tests fi for _INT in ${ENABLED_INTEGRATIONS} @@ -104,3 +152,7 @@ done RUN_INTEGRATION_TESTS=${RUN_INTEGRATION_TESTS:=""} run_airflow_testing_in_docker "${@}" + +if [[ ${TEST_TYPE:=} == "Quarantined" ]]; then + export ONLY_RUN_QUARANTINED_TESTS="true" +fi diff --git a/scripts/ci/tools/ci_clear_tmp.sh b/scripts/ci/tools/ci_clear_tmp.sh index 9a8cb4a6fc67c..ff5f6018691ce 100755 --- a/scripts/ci/tools/ci_clear_tmp.sh +++ b/scripts/ci/tools/ci_clear_tmp.sh @@ -40,5 +40,6 @@ export HOST_OS docker-compose \ -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ -f "${SCRIPTS_CI_DIR}/docker-compose/local.yml" \ + -f "${SCRIPTS_CI_DIR}/docker-compose/files.yml" \ run --entrypoint /bin/bash \ airflow -c /opt/airflow/scripts/ci/in_container/run_clear_tmp.sh diff --git a/scripts/ci/tools/ci_fix_ownership.sh b/scripts/ci/tools/ci_fix_ownership.sh index d3ae4bad1db02..762f41e01b34f 100755 --- a/scripts/ci/tools/ci_fix_ownership.sh +++ b/scripts/ci/tools/ci_fix_ownership.sh @@ -41,6 +41,7 @@ export BACKEND="sqlite" docker-compose \ -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ -f "${SCRIPTS_CI_DIR}/docker-compose/local.yml" \ + -f "${SCRIPTS_CI_DIR}/docker-compose/files.yml" \ -f "${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml" \ run --entrypoint /bin/bash \ airflow -c /opt/airflow/scripts/ci/in_container/run_fix_ownership.sh diff --git a/setup.py b/setup.py index 6ca3704a8c0ce..1f055e012df50 100644 --- a/setup.py +++ b/setup.py @@ -238,7 +238,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'docker~=3.0', ] druid = [ - 'pydruid>=0.4.1,<=0.5.8', + 'pydruid>=0.4.1', ] 
elasticsearch = [ 'elasticsearch>7, <7.6.0', @@ -267,9 +267,9 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'google-cloud-bigtable>=1.0.0', 'google-cloud-container>=0.1.1,<2.0', 'google-cloud-datacatalog>=0.5.0,<0.8', - 'google-cloud-dataproc>=0.5.0', + 'google-cloud-dataproc>=1.0.1', 'google-cloud-dlp>=0.11.0', - 'google-cloud-kms>=1.2.1', + 'google-cloud-kms>=1.2.1,<2.0.0', 'google-cloud-language>=1.1.1', 'google-cloud-logging>=1.14.0', 'google-cloud-monitoring>=0.34.0', @@ -409,7 +409,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'statsd>=3.3.0, <4.0', ] tableau = [ - 'tableauserverclient==0.9', + 'tableauserverclient~=0.12', ] vertica = [ 'vertica-python>=0.5.1', @@ -451,6 +451,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'flake8-colors', 'flaky', 'freezegun', + 'github3.py', 'gitpython', 'ipdb', 'jira', @@ -462,11 +463,11 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'pre-commit', 'pylint==2.5.3', 'pysftp', - 'pytest', + 'pytest<6.0.0', # FIXME: pylint complaining for pytest.mark.* on v6.0 'pytest-cov', 'pytest-instafail', 'pytest-rerunfailures', - 'pytest-timeout', + 'pytest-timeouts', 'pytest-xdist', 'pywinrm', 'qds-sdk>=1.9.6', @@ -510,7 +511,6 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version "discord": [], "docker": docker, "elasticsearch": [], - "email": [], "exasol": exasol, "facebook": facebook, "ftp": [], @@ -702,7 +702,7 @@ def is_package_excluded(package: str, exclusion_list: List[str]): 'dill>=0.2.2, <0.4', 'flask>=1.1.0, <2.0', 'flask-appbuilder>2.3.4,~=3.0', - 'flask-caching>=1.3.3, <1.4.0', + 'flask-caching>=1.3.3, <2.0.0', 'flask-login>=0.3, <0.5', 'flask-swagger==0.2.13', 'flask-wtf>=0.14.2, <0.15', @@ -710,7 +710,7 @@ def is_package_excluded(package: str, exclusion_list: List[str]): 'graphviz>=0.12', 'gunicorn>=19.5.0, <20.0', 'iso8601>=0.1.12', - 
'jinja2>=2.10.1, <2.11.0', + 'jinja2>=2.10.1, <2.12.0', 'json-merge-patch==0.2', 'jsonschema~=3.0', 'lazy_object_proxy~=1.3', @@ -732,7 +732,7 @@ def is_package_excluded(package: str, exclusion_list: List[str]): 'sqlalchemy~=1.3', 'sqlalchemy_jsonfield~=0.9', 'tabulate>=0.7.5, <0.9', - 'tenacity==4.12.0', + 'tenacity>=4.12.0, <5.2', 'thrift>=0.9.2', 'typing;python_version<"3.6"', 'typing-extensions>=3.7.4;python_version<"3.8"', diff --git a/tests/airflow_pylint/disable_checks_for_tests.py b/tests/airflow_pylint/disable_checks_for_tests.py new file mode 100644 index 0000000000000..ddaae0b9d478c --- /dev/null +++ b/tests/airflow_pylint/disable_checks_for_tests.py @@ -0,0 +1,60 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + +from astroid import MANAGER, scoped_nodes +from pylint.lint import PyLinter + +DISABLED_CHECKS_FOR_TESTS = \ + "missing-docstring, no-self-use, too-many-public-methods, protected-access, do-not-use-asserts" + + +def register(_: PyLinter): + """ + Skip registering any plugin. This is not a real plugin - we only need it to register transform before + running pylint. + + :param _: + :return: + """ + + +def transform(mod): + """ + It's a small hack but one that gives us a lot of speedup in pylint tests. 
We are replacing the first + line of the file with pylint-disable (or update the existing one) when the file name starts with `test_` or + (for providers) when it is the full path of the package (both cases occur in pylint) + + :param mod: astroid module + :return: None + """ + if mod.name.startswith("test_") or \ + mod.name.startswith("tests.") or \ + mod.name.startswith("kubernetes_tests."): + decoded_lines = mod.stream().read().decode("utf-8").split("\n") + if decoded_lines[0].startswith("# pylint: disable="): + decoded_lines[0] = decoded_lines[0] + " " + DISABLED_CHECKS_FOR_TESTS + elif decoded_lines[0].startswith("#") or decoded_lines[0].strip() == "": + decoded_lines[0] = "# pylint: disable=" + DISABLED_CHECKS_FOR_TESTS + else: + raise Exception(f"The first line of module {mod.name} is not a comment or empty. " + f"Please make sure it is!") + # pylint will read from `.file_bytes` attribute later when tokenization + mod.file_bytes = "\n".join(decoded_lines).encode("utf-8") + + +MANAGER.register_transform(scoped_nodes.Module, transform) diff --git a/tests/api/common/experimental/test_mark_tasks.py b/tests/api/common/experimental/test_mark_tasks.py index a7df82e06ba5f..4c48383a0e773 100644 --- a/tests/api/common/experimental/test_mark_tasks.py +++ b/tests/api/common/experimental/test_mark_tasks.py @@ -50,7 +50,7 @@ def setUpClass(cls): cls.dag3 = dagbag.dags['example_trigger_target_dag'] cls.dag3.sync_to_db() cls.execution_dates = [days_ago(2), days_ago(1)] - start_date3 = cls.dag3.default_args["start_date"] + start_date3 = cls.dag3.start_date cls.dag3_execution_dates = [start_date3, start_date3 + timedelta(days=1), start_date3 + timedelta(days=2)] @@ -64,7 +64,7 @@ def setUp(self): dr.verify_integrity() drs = _create_dagruns(self.dag2, - [self.dag2.default_args['start_date']], + [self.dag2.start_date], state=State.RUNNING, run_type=DagRunType.SCHEDULED) diff --git a/tests/cli/commands/test_config_command.py b/tests/cli/commands/test_config_command.py index
be1ad5a5247a4..c7c8925b9e1fe 100644 --- a/tests/cli/commands/test_config_command.py +++ b/tests/cli/commands/test_config_command.py @@ -24,7 +24,7 @@ from tests.test_utils.config import conf_vars -class TestCliConfig(unittest.TestCase): +class TestCliConfigList(unittest.TestCase): @classmethod def setUpClass(cls): cls.parser = cli_parser.get_parser() @@ -32,7 +32,7 @@ def setUpClass(cls): @mock.patch("airflow.cli.commands.config_command.io.StringIO") @mock.patch("airflow.cli.commands.config_command.conf") def test_cli_show_config_should_write_data(self, mock_conf, mock_stringio): - config_command.show_config(self.parser.parse_args(['config', '--color', 'off'])) + config_command.show_config(self.parser.parse_args(['config', 'list', '--color', 'off'])) mock_conf.write.assert_called_once_with(mock_stringio.return_value.__enter__.return_value) @conf_vars({ @@ -40,6 +40,49 @@ def test_cli_show_config_should_write_data(self, mock_conf, mock_stringio): }) def test_cli_show_config_should_display_key(self): with contextlib.redirect_stdout(io.StringIO()) as temp_stdout: - config_command.show_config(self.parser.parse_args(['config', '--color', 'off'])) + config_command.show_config(self.parser.parse_args(['config', 'list', '--color', 'off'])) self.assertIn('[core]', temp_stdout.getvalue()) self.assertIn('testkey = test_value', temp_stdout.getvalue()) + + +class TestCliConfigGetValue(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.parser = cli_parser.get_parser() + + @conf_vars({ + ('core', 'test_key'): 'test_value' + }) + def test_should_display_value(self): + with contextlib.redirect_stdout(io.StringIO()) as temp_stdout: + config_command.get_value(self.parser.parse_args(['config', 'get-value', 'core', 'test_key'])) + + self.assertEqual("test_value", temp_stdout.getvalue().strip()) + + @mock.patch("airflow.cli.commands.config_command.conf") + def test_should_raise_exception_when_section_is_missing(self, mock_conf): + mock_conf.has_section.return_value = False + 
mock_conf.has_option.return_value = True + + with contextlib.redirect_stderr(io.StringIO()) as temp_stderr, self.assertRaises(SystemExit) as cm: + config_command.get_value(self.parser.parse_args( + ['config', 'get-value', 'missing-section', 'dags_folder'] + )) + self.assertEqual(1, cm.exception.code) + self.assertEqual( + "The section [missing-section] is not found in config.", temp_stderr.getvalue().strip() + ) + + @mock.patch("airflow.cli.commands.config_command.conf") + def test_should_raise_exception_when_option_is_missing(self, mock_conf): + mock_conf.has_section.return_value = True + mock_conf.has_option.return_value = False + + with contextlib.redirect_stderr(io.StringIO()) as temp_stderr, self.assertRaises(SystemExit) as cm: + config_command.get_value(self.parser.parse_args( + ['config', 'get-value', 'missing-section', 'dags_folder'] + )) + self.assertEqual(1, cm.exception.code) + self.assertEqual( + "The option [missing-section/dags_folder] is not found in config.", temp_stderr.getvalue().strip() + ) diff --git a/tests/cli/commands/test_connection_command.py b/tests/cli/commands/test_connection_command.py index 75df6bd825e7f..c6dbac8c4cc4c 100644 --- a/tests/cli/commands/test_connection_command.py +++ b/tests/cli/commands/test_connection_command.py @@ -16,6 +16,7 @@ # under the License. 
import io +import json import unittest from contextlib import redirect_stdout from unittest import mock @@ -111,6 +112,302 @@ def test_cli_connections_include_secrets(self): connection_command.connections_list(args) +class TestCliExportConnections(unittest.TestCase): + @provide_session + def setUp(self, session=None): + clear_db_connections(add_default_connections_back=False) + merge_conn( + Connection( + conn_id="airflow_db", + conn_type="mysql", + host="mysql", + login="root", + password="plainpassword", + schema="airflow", + ), + session + ) + merge_conn( + Connection( + conn_id="druid_broker_default", + conn_type="druid", + host="druid-broker", + port=8082, + extra='{"endpoint": "druid/v2/sql"}', + ), + session + ) + + self.parser = cli_parser.get_parser() + + def tearDown(self): + clear_db_connections() + + def test_cli_connections_export_should_return_error_for_invalid_command(self): + with self.assertRaises(SystemExit): + self.parser.parse_args([ + "connections", + "export", + ]) + + def test_cli_connections_export_should_return_error_for_invalid_format(self): + with self.assertRaises(SystemExit): + self.parser.parse_args([ + "connections", + "export", + "--format", + "invalid", + "/path/to/file" + ]) + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', new_callable=mock.mock_open()) + def test_cli_connections_export_should_return_error_for_invalid_export_format(self, + mock_file_open, + mock_splittext): + output_filepath = '/tmp/connections.invalid' + mock_splittext.return_value = (None, '.invalid') + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + ]) + with self.assertRaisesRegex( + SystemExit, r"Unsupported file format. 
The file must have the extension .yaml, .json, .env" + ): + connection_command.connections_export(args) + + mock_splittext.assert_called_once() + mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_not_called() + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', new_callable=mock.mock_open()) + @mock.patch.object(connection_command, 'create_session') + def test_cli_connections_export_should_return_error_if_create_session_fails(self, mock_session, + mock_file_open, + mock_splittext): + output_filepath = '/tmp/connections.json' + + def my_side_effect(): + raise Exception("dummy exception") + mock_session.side_effect = my_side_effect + mock_splittext.return_value = (None, '.json') + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + ]) + with self.assertRaisesRegex(Exception, r"dummy exception"): + connection_command.connections_export(args) + + mock_splittext.assert_not_called() + mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_not_called() + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', new_callable=mock.mock_open()) + @mock.patch.object(connection_command, 'create_session') + def test_cli_connections_export_should_return_error_if_fetching_connections_fails(self, mock_session, + mock_file_open, + mock_splittext): + output_filepath = '/tmp/connections.json' + + def my_side_effect(): + raise Exception("dummy exception") + mock_session.return_value.__enter__.return_value.query.return_value.all.side_effect = my_side_effect + mock_splittext.return_value = (None, '.json') + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + ]) + with self.assertRaisesRegex(Exception, r"dummy exception"): + connection_command.connections_export(args) + + mock_splittext.assert_called_once() + mock_file_open.assert_called_once_with(output_filepath, 
'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_not_called() + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', new_callable=mock.mock_open()) + @mock.patch.object(connection_command, 'create_session') + def test_cli_connections_export_should_not_return_error_if_connections_is_empty(self, mock_session, + mock_file_open, + mock_splittext): + output_filepath = '/tmp/connections.json' + + mock_session.return_value.__enter__.return_value.query.return_value.all.return_value = [] + mock_splittext.return_value = (None, '.json') + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + ]) + connection_command.connections_export(args) + + mock_splittext.assert_called_once() + mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_called_once_with('{}') + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', new_callable=mock.mock_open()) + def test_cli_connections_export_should_export_as_json(self, mock_file_open, mock_splittext): + output_filepath = '/tmp/connections.json' + mock_splittext.return_value = (None, '.json') + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + ]) + connection_command.connections_export(args) + + expected_connections = json.dumps({ + "airflow_db": { + "conn_type": "mysql", + "host": "mysql", + "login": "root", + "password": "plainpassword", + "schema": "airflow", + "port": None, + "extra": None, + }, + "druid_broker_default": { + "conn_type": "druid", + "host": "druid-broker", + "login": None, + "password": None, + "schema": None, + "port": 8082, + "extra": "{\"endpoint\": \"druid/v2/sql\"}", + } + }) + + mock_splittext.assert_called_once() + mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_called_once_with(expected_connections) + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', 
new_callable=mock.mock_open()) + def test_cli_connections_export_should_export_as_yaml(self, mock_file_open, mock_splittext): + output_filepath = '/tmp/connections.yaml' + mock_splittext.return_value = (None, '.yaml') + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + ]) + connection_command.connections_export(args) + + expected_connections = ("airflow_db:\n" + " conn_type: mysql\n" + " extra: null\n" + " host: mysql\n" + " login: root\n" + " password: plainpassword\n" + " port: null\n" + " schema: airflow\n" + "druid_broker_default:\n" + " conn_type: druid\n" + " extra: \'{\"endpoint\": \"druid/v2/sql\"}\'\n" + " host: druid-broker\n" + " login: null\n" + " password: null\n" + " port: 8082\n" + " schema: null\n") + mock_splittext.assert_called_once() + mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_called_once_with(expected_connections) + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', new_callable=mock.mock_open()) + def test_cli_connections_export_should_export_as_env(self, mock_file_open, mock_splittext): + output_filepath = '/tmp/connections.env' + mock_splittext.return_value = (None, '.env') + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + ]) + connection_command.connections_export(args) + + expected_connections = ( + "airflow_db=mysql://root:plainpassword@mysql/airflow\n" + "druid_broker_default=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql\n") + + mock_splittext.assert_called_once() + mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_called_once_with(expected_connections) + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', new_callable=mock.mock_open()) + def test_cli_connections_export_should_export_as_env_for_uppercase_file_extension(self, mock_file_open, + mock_splittext): + output_filepath = 
'/tmp/connections.ENV' + mock_splittext.return_value = (None, '.ENV') + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + ]) + connection_command.connections_export(args) + + expected_connections = ( + "airflow_db=mysql://root:plainpassword@mysql/airflow\n" + "druid_broker_default=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql\n") + + mock_splittext.assert_called_once() + mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_called_once_with(expected_connections) + + @mock.patch('os.path.splitext') + @mock.patch('builtins.open', new_callable=mock.mock_open()) + def test_cli_connections_export_should_force_export_as_specified_format(self, mock_file_open, + mock_splittext): + output_filepath = '/tmp/connections.yaml' + + args = self.parser.parse_args([ + "connections", + "export", + output_filepath, + "--format", + "json", + ]) + connection_command.connections_export(args) + + expected_connections = json.dumps({ + "airflow_db": { + "conn_type": "mysql", + "host": "mysql", + "login": "root", + "password": "plainpassword", + "schema": "airflow", + "port": None, + "extra": None, + }, + "druid_broker_default": { + "conn_type": "druid", + "host": "druid-broker", + "login": None, + "password": None, + "schema": None, + "port": 8082, + "extra": "{\"endpoint\": \"druid/v2/sql\"}", + } + }) + mock_splittext.assert_not_called() + mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None) + mock_file_open.return_value.write.assert_called_once_with(expected_connections) + + TEST_URL = "postgresql://airflow:airflow@host:5432/airflow" diff --git a/tests/cli/commands/test_legacy_commands.py b/tests/cli/commands/test_legacy_commands.py new file mode 100644 index 0000000000000..42a04ff5bb008 --- /dev/null +++ b/tests/cli/commands/test_legacy_commands.py @@ -0,0 +1,61 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor 
license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import contextlib +import io +import unittest +from argparse import ArgumentError +from unittest.mock import MagicMock + +from airflow.cli import cli_parser +from airflow.cli.commands import config_command +from airflow.cli.commands.legacy_commands import COMMAND_MAP, check_legacy_command + +LEGACY_COMMANDS = ["worker", "flower", "trigger_dag", "delete_dag", "show_dag", "list_dag", + "dag_status", "backfill", "list_dag_runs", "pause", "unpause", "test", + "clear", "list_tasks", "task_failed_deps", "task_state", "run", + "render", "initdb", "resetdb", "upgradedb", "checkdb", "shell", "pool", + "list_users", "create_user", "delete_user"] + + +class TestCliDeprecatedCommandsValue(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.parser = cli_parser.get_parser() + + def test_should_display_value(self): + with self.assertRaises(SystemExit) as cm_exception, \ + contextlib.redirect_stderr(io.StringIO()) as temp_stderr: + config_command.get_value(self.parser.parse_args(['worker'])) + + self.assertEqual(2, cm_exception.exception.code) + self.assertIn( + "`airflow worker` command, has been removed, " + "please use `airflow celery worker`, see help above.", + temp_stderr.getvalue().strip() + ) + + def test_command_map(self): + for item in LEGACY_COMMANDS: + 
self.assertIsNotNone(COMMAND_MAP[item]) + + def test_check_legacy_command(self): + action = MagicMock() + with self.assertRaises(ArgumentError) as e: + check_legacy_command(action, 'list_users') + self.assertEqual( + str(e.exception), + "argument : `airflow list_users` command, has been removed, please use `airflow users list`") diff --git a/tests/cli/commands/test_webserver_command.py b/tests/cli/commands/test_webserver_command.py index f2a39af10a564..479a141eaf4b0 100644 --- a/tests/cli/commands/test_webserver_command.py +++ b/tests/cli/commands/test_webserver_command.py @@ -23,6 +23,7 @@ from unittest import mock import psutil +import pytest from airflow import settings from airflow.cli import cli_parser @@ -304,6 +305,7 @@ def test_cli_webserver_foreground_with_pid(self): proc.terminate() self.assertEqual(0, proc.wait(60)) + @pytest.mark.quarantined def test_cli_webserver_background(self): with tempfile.TemporaryDirectory(prefix="gunicorn") as tmpdir, \ mock.patch.dict( diff --git a/tests/dags/test_logging_in_dag.py b/tests/dags/test_logging_in_dag.py index 900e43006404f..6d828fa631d4a 100644 --- a/tests/dags/test_logging_in_dag.py +++ b/tests/dags/test_logging_in_dag.py @@ -25,6 +25,11 @@ def test_logging_fn(**kwargs): + """ + Tests DAG logging. + :param kwargs: + :return: + """ logger.info("Log from DAG Logger") kwargs["ti"].log.info("Log from TI Logger") print("Log from Print statement") diff --git a/tests/dags/test_task_view_type_check.py b/tests/dags/test_task_view_type_check.py index 6fa3e3cd42d13..fa04b5e0f5b3f 100644 --- a/tests/dags/test_task_view_type_check.py +++ b/tests/dags/test_task_view_type_check.py @@ -34,6 +34,9 @@ class CallableClass: + """ + Class that is callable. 
+ """ def __call__(self): """ A __call__ method """ diff --git a/tests/deprecated_classes.py b/tests/deprecated_classes.py index 4545fedbf8d4a..2d53951c2a2b3 100644 --- a/tests/deprecated_classes.py +++ b/tests/deprecated_classes.py @@ -1268,7 +1268,7 @@ 'airflow.contrib.operators.winrm_operator.WinRMOperator', ), ( - 'airflow.providers.email.operators.email.EmailOperator', + 'airflow.operators.email.EmailOperator', 'airflow.operators.email_operator.EmailOperator', ), ( diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index d8a86d00d24fe..0020eadca2cb9 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -1473,8 +1473,10 @@ def test_process_executor_events(self, mock_stats_incr): scheduler.processor_agent.send_callback_to_execute.assert_called_once_with( full_filepath='/test_path1/', task_instance=mock.ANY, - msg='Executor reports task instance finished (failed) ' - 'although the task says its queued. (Info: None) Was the task killed externally?' + msg='Executor reports task instance ' + ' ' + 'finished (failed) although the task says its queued. (Info: None) ' + 'Was the task killed externally?' 
) scheduler.processor_agent.reset_mock() diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py index fa6804b567c7b..89d6605c22c4c 100644 --- a/tests/models/test_baseoperator.py +++ b/tests/models/test_baseoperator.py @@ -363,8 +363,8 @@ class CustomOp(DummyOperator): template_fields = ("field", "field2") @apply_defaults - def __init__(self, field=None, field2=None, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, field=None, field2=None, **kwargs): + super().__init__(**kwargs) self.field = field self.field2 = field2 diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index 8891d56c8e98d..a348b71b3a649 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -1089,6 +1089,35 @@ def test_schedule_dag_relativedelta(self): dag.clear() self._clean_up(dag_id) + @patch('airflow.models.dag.Stats') + def test_dag_handle_callback_crash(self, mock_stats): + """ + Tests that exceptions raised in DAG callbacks do not cause crashes + """ + dag_id = "test_dag_callback_crash" + mock_callback_with_exception = mock.MagicMock() + mock_callback_with_exception.side_effect = Exception + dag = DAG( + dag_id=dag_id, + # callback with invalid signature should not cause crashes + on_success_callback=lambda: 1, + on_failure_callback=mock_callback_with_exception) + dag.add_task(BaseOperator( + task_id="faketastic", + owner='Also fake', + start_date=datetime_tz(2015, 1, 2, 0, 0))) + + dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock()) + dag_run = dag_file_processor.create_dag_run(dag) + # should not raise any exception + dag.handle_callback(dag_run, success=False) + dag.handle_callback(dag_run, success=True) + + mock_stats.incr.assert_called_with("dag.callback_exceptions") + + dag.clear() + self._clean_up(dag_id) + def test_schedule_dag_fake_scheduled_previous(self): """ Test scheduling a dag where there is a prior DagRun diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index 
98e8fa724cc6d..7be1854618fb1 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -565,6 +565,25 @@ def with_all_tasks_removed(dag): flaky_ti.refresh_from_db() self.assertEqual(State.NONE, flaky_ti.state) + def test_already_added_task_instances_can_be_ignored(self): + dag = DAG('triggered_dag', start_date=DEFAULT_DATE) + dag.add_task(DummyOperator(task_id='first_task', owner='test')) + + dagrun = self.create_dag_run(dag) + first_ti = dagrun.get_task_instances()[0] + self.assertEqual('first_task', first_ti.task_id) + self.assertEqual(State.NONE, first_ti.state) + + # Let's assume that the above TI was added into DB by webserver, but if scheduler + # is running the same method at the same time it would find 0 TIs for this dag + # and proceed further to create TIs. Hence mocking DagRun.get_task_instances + # method to return an empty list of TIs. + with mock.patch.object(DagRun, 'get_task_instances') as mock_gtis: + mock_gtis.return_value = [] + dagrun.verify_integrity() + first_ti.refresh_from_db() + self.assertEqual(State.NONE, first_ti.state) + @parameterized.expand([(state,) for state in State.task_states]) @mock.patch('airflow.models.dagrun.task_instance_mutation_hook') def test_task_instance_mutation_hook(self, state, mock_hook): diff --git a/tests/providers/email/operators/test_email.py b/tests/operators/test_email.py similarity index 92% rename from tests/providers/email/operators/test_email.py rename to tests/operators/test_email.py index f4e7641db7605..867c1116a9370 100644 --- a/tests/providers/email/operators/test_email.py +++ b/tests/operators/test_email.py @@ -21,7 +21,7 @@ from unittest import mock from airflow.models.dag import DAG -from airflow.providers.email.operators.email import EmailOperator +from airflow.operators.email import EmailOperator from airflow.utils import timezone from tests.test_utils.config import conf_vars @@ -57,7 +57,7 @@ def _run_as_operator(self, **kwargs): def test_execute(self): with conf_vars( - 
{('email', 'email_backend'): 'tests.providers.email.operators.test_email.send_email_test'} + {('email', 'email_backend'): 'tests.operators.test_email.send_email_test'} ): self._run_as_operator() assert send_email_test.call_count == 1 diff --git a/tests/operators/test_python.py b/tests/operators/test_python.py index 6bcf712633ab7..a8a04bb1f8369 100644 --- a/tests/operators/test_python.py +++ b/tests/operators/test_python.py @@ -68,13 +68,16 @@ def build_recording_function(calls_collection): Then using this custom function recording custom Call objects for further testing (replacing Mock.assert_called_with assertion method) """ + def recording_function(*args, **kwargs): calls_collection.append(Call(*args, **kwargs)) + return recording_function class TestPythonBase(unittest.TestCase): """Base test class for TestPythonOperator and TestPythonSensor classes""" + @classmethod def setUpClass(cls): super().setUpClass() @@ -326,9 +329,11 @@ def test_python_operator_python_callable_is_callable(self): def test_fails_bad_signature(self): """Tests that @task will fail if signature is not binding.""" + @task_decorator def add_number(num: int) -> int: return num + 2 + with pytest.raises(TypeError): add_number(2, 3) # pylint: disable=too-many-function-args with pytest.raises(TypeError): @@ -345,12 +350,14 @@ class Test: @task_decorator def add_number(self, num: int) -> int: return self.num + num + Test().add_number(2) def test_fail_multiple_outputs_key_type(self): @task_decorator(multiple_outputs=True) def add_number(num: int): return {2: num} + with self.dag: ret = add_number(2) self.dag.create_dagrun( @@ -450,6 +457,7 @@ def test_manual_task_id(self): @task_decorator(task_id='some_name') def do_run(): return 4 + with self.dag: do_run() assert ['some_name'] == self.dag.task_ids @@ -460,6 +468,7 @@ def test_multiple_calls(self): @task_decorator def do_run(): return 4 + with self.dag: do_run() assert ['do_run'] == self.dag.task_ids @@ -472,6 +481,7 @@ def do_run(): def 
test_call_20(self): """Test calling decorated function 21 times in a DAG""" + @task_decorator def __do_run(): return 4 @@ -513,6 +523,7 @@ def return_dict(number: int): def test_default_args(self): """Test that default_args are captured when calling the function correctly""" + @task_decorator def do_run(): return 4 @@ -1060,22 +1071,21 @@ def _run_as_operator(self, fn, python_version=sys.version_info[0], **kwargs): dag=self.dag, **kwargs) task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) + return task - def test_dill_warning(self): + def test_add_dill(self): def f(): pass - with self.assertRaises(AirflowException): - PythonVirtualenvOperator( - python_callable=f, - task_id='task', - dag=self.dag, - use_dill=True, - system_site_packages=False) + + task = self._run_as_operator(f, use_dill=True, system_site_packages=False) + assert 'dill' in task.requirements def test_no_requirements(self): """Tests that the python callable is invoked on task run.""" + def f(): pass + self._run_as_operator(f) def test_no_system_site_packages(self): @@ -1085,11 +1095,13 @@ def f(): except ImportError: return True raise Exception + self._run_as_operator(f, system_site_packages=False, requirements=['dill']) def test_system_site_packages(self): def f(): import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + self._run_as_operator(f, requirements=['funcsigs'], system_site_packages=True) def test_with_requirements_pinned(self): @@ -1106,30 +1118,35 @@ def f(): def test_unpinned_requirements(self): def f(): import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + self._run_as_operator( f, requirements=['funcsigs', 'dill'], system_site_packages=False) def test_range_requirements(self): def f(): import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + self._run_as_operator( f, requirements=['funcsigs>1.0', 'dill'], system_site_packages=False) def test_fail(self): def f(): 
raise Exception + with self.assertRaises(CalledProcessError): self._run_as_operator(f) def test_python_2(self): def f(): {}.iteritems() # pylint: disable=no-member + self._run_as_operator(f, python_version=2, requirements=['dill']) def test_python_2_7(self): def f(): {}.iteritems() # pylint: disable=no-member return True + self._run_as_operator(f, python_version='2.7', requirements=['dill']) def test_python_3(self): @@ -1141,6 +1158,7 @@ def f(): except AttributeError: return raise Exception + self._run_as_operator(f, python_version=3, use_dill=False, requirements=['dill']) @staticmethod @@ -1165,6 +1183,7 @@ def f(): def test_without_dill(self): def f(a): return a + self._run_as_operator(f, system_site_packages=False, use_dill=False, op_args=[4]) def test_string_args(self): @@ -1173,6 +1192,7 @@ def f(): print(virtualenv_string_args) if virtualenv_string_args[0] != virtualenv_string_args[2]: raise Exception + self._run_as_operator( f, python_version=self._invert_python_major_version(), string_args=[1, 2, 1]) @@ -1182,11 +1202,13 @@ def f(a, b, c=False, d=False): return True else: raise Exception + self._run_as_operator(f, op_args=[0, 1], op_kwargs={'c': True}) def test_return_none(self): def f(): return None + self._run_as_operator(f) def test_lambda(self): @@ -1199,13 +1221,132 @@ def test_lambda(self): def test_nonimported_as_arg(self): def f(_): return None + self._run_as_operator(f, op_args=[datetime.utcnow()]) def test_context(self): def f(templates_dict): return templates_dict['ds'] + self._run_as_operator(f, templates_dict={'ds': '{{ ds }}'}) + def test_airflow_context(self): + def f( + # basic + ds_nodash, + inlets, + next_ds, + next_ds_nodash, + outlets, + params, + prev_ds, + prev_ds_nodash, + run_id, + task_instance_key_str, + test_mode, + tomorrow_ds, + tomorrow_ds_nodash, + ts, + ts_nodash, + ts_nodash_with_tz, + yesterday_ds, + yesterday_ds_nodash, + # pendulum-specific + execution_date, + next_execution_date, + prev_execution_date, + 
prev_execution_date_success, + prev_start_date_success, + # airflow-specific + macros, + conf, + dag, + dag_run, + task, + # other + **context + ): # pylint: disable=unused-argument,too-many-arguments,too-many-locals + pass + + self._run_as_operator( + f, + use_dill=True, + system_site_packages=True, + requirements=None + ) + + def test_pendulum_context(self): + def f( + # basic + ds_nodash, + inlets, + next_ds, + next_ds_nodash, + outlets, + params, + prev_ds, + prev_ds_nodash, + run_id, + task_instance_key_str, + test_mode, + tomorrow_ds, + tomorrow_ds_nodash, + ts, + ts_nodash, + ts_nodash_with_tz, + yesterday_ds, + yesterday_ds_nodash, + # pendulum-specific + execution_date, + next_execution_date, + prev_execution_date, + prev_execution_date_success, + prev_start_date_success, + # other + **context + ): # pylint: disable=unused-argument,too-many-arguments,too-many-locals + pass + + self._run_as_operator( + f, + use_dill=True, + system_site_packages=False, + requirements=['pendulum', 'lazy_object_proxy'] + ) + + def test_base_context(self): + def f( + # basic + ds_nodash, + inlets, + next_ds, + next_ds_nodash, + outlets, + params, + prev_ds, + prev_ds_nodash, + run_id, + task_instance_key_str, + test_mode, + tomorrow_ds, + tomorrow_ds_nodash, + ts, + ts_nodash, + ts_nodash_with_tz, + yesterday_ds, + yesterday_ds_nodash, + # other + **context + ): # pylint: disable=unused-argument,too-many-arguments,too-many-locals + pass + + self._run_as_operator( + f, + use_dill=True, + system_site_packages=False, + requirements=None + ) + DEFAULT_ARGS = { "owner": "test", diff --git a/tests/providers/amazon/aws/operators/test_athena.py b/tests/providers/amazon/aws/operators/test_athena.py index 109c98346281f..0871e9fd352b3 100644 --- a/tests/providers/amazon/aws/operators/test_athena.py +++ b/tests/providers/amazon/aws/operators/test_athena.py @@ -72,8 +72,7 @@ def test_init(self): self.assertEqual(self.athena.client_request_token, MOCK_DATA['client_request_token']) 
self.assertEqual(self.athena.sleep_time, 0) - hook = self.athena.get_hook() - self.assertEqual(hook.sleep_time, 0) + self.assertEqual(self.athena.hook.sleep_time, 0) @mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("SUCCESS",)) @mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID) diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_processing.py b/tests/providers/amazon/aws/operators/test_sagemaker_processing.py new file mode 100644 index 0000000000000..db75cde1cebc3 --- /dev/null +++ b/tests/providers/amazon/aws/operators/test_sagemaker_processing.py @@ -0,0 +1,167 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import unittest + +import mock +from parameterized import parameterized + +from airflow.exceptions import AirflowException +from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook +from airflow.providers.amazon.aws.operators.sagemaker_processing import SageMakerProcessingOperator + +job_name = 'test-job-name' + +create_processing_params = { + "AppSpecification": { + "ContainerArguments": ["container_arg"], + "ContainerEntrypoint": ["container_entrypoint"], + "ImageUri": "{{ image_uri }}", + }, + "Environment": {"{{ key }}": "{{ value }}"}, + "ExperimentConfig": { + "ExperimentName": "ExperimentName", + "TrialComponentDisplayName": "TrialComponentDisplayName", + "TrialName": "TrialName", + }, + "ProcessingInputs": [ + { + "InputName": "AnalyticsInputName", + "S3Input": { + "LocalPath": "{{ Local Path }}", + "S3CompressionType": "None", + "S3DataDistributionType": "FullyReplicated", + "S3DataType": "S3Prefix", + "S3InputMode": "File", + "S3Uri": "{{ S3Uri }}", + }, + } + ], + "ProcessingJobName": job_name, + "ProcessingOutputConfig": { + "KmsKeyId": "KmsKeyID", + "Outputs": [ + { + "OutputName": "AnalyticsOutputName", + "S3Output": { + "LocalPath": "{{ Local Path }}", + "S3UploadMode": "EndOfJob", + "S3Uri": "{{ S3Uri }}", + }, + } + ], + }, + "ProcessingResources": { + "ClusterConfig": { + "InstanceCount": 2, + "InstanceType": "ml.p2.xlarge", + "VolumeSizeInGB": 30, + "VolumeKmsKeyId": "{{ kms_key }}", + } + }, + "RoleArn": "arn:aws:iam::0122345678910:role/SageMakerPowerUser", + "Tags": [{"{{ key }}": "{{ value }}"}], +} + +create_processing_params_with_stopping_condition = create_processing_params.copy() +create_processing_params_with_stopping_condition.update(StoppingCondition={"MaxRuntimeInSeconds": 3600}) + + +class TestSageMakerProcessingOperator(unittest.TestCase): + + def setUp(self): + self.processing_config_kwargs = dict(task_id='test_sagemaker_operator', + aws_conn_id='sagemaker_test_id', + wait_for_completion=False, + check_interval=5) + 
+ @parameterized.expand([ + (create_processing_params, [['ProcessingResources', 'ClusterConfig', 'InstanceCount'], + ['ProcessingResources', 'ClusterConfig', 'VolumeSizeInGB']]), + (create_processing_params_with_stopping_condition, [ + ['ProcessingResources', 'ClusterConfig', 'InstanceCount'], + ['ProcessingResources', 'ClusterConfig', 'VolumeSizeInGB'], + ['StoppingCondition', 'MaxRuntimeInSeconds']])]) + def test_integer_fields_are_set(self, config, expected_fields): + sagemaker = SageMakerProcessingOperator(**self.processing_config_kwargs, config=config) + assert sagemaker.integer_fields == expected_fields + + @mock.patch.object(SageMakerHook, 'get_conn') + @mock.patch.object(SageMakerHook, 'create_processing_job', + return_value={'ProcessingJobArn': 'testarn', + 'ResponseMetadata': {'HTTPStatusCode': 200}}) + def test_execute(self, mock_processing, mock_client): + sagemaker = SageMakerProcessingOperator(**self.processing_config_kwargs, + config=create_processing_params) + sagemaker.execute(None) + mock_processing.assert_called_once_with(create_processing_params, + wait_for_completion=False, + check_interval=5, + max_ingestion_time=None + ) + + @mock.patch.object(SageMakerHook, 'get_conn') + @mock.patch.object(SageMakerHook, 'create_processing_job', + return_value={'ProcessingJobArn': 'testarn', + 'ResponseMetadata': {'HTTPStatusCode': 404}}) + def test_execute_with_failure(self, mock_processing, mock_client): + sagemaker = SageMakerProcessingOperator(**self.processing_config_kwargs, + config=create_processing_params) + self.assertRaises(AirflowException, sagemaker.execute, None) + + @mock.patch.object(SageMakerHook, "get_conn") + @mock.patch.object(SageMakerHook, "list_processing_jobs", + return_value=[{"ProcessingJobName": job_name}]) + @mock.patch.object(SageMakerHook, "create_processing_job", + return_value={"ResponseMetadata": {"HTTPStatusCode": 200}}) + def test_execute_with_existing_job_increment( + self, mock_create_processing_job, 
mock_list_processing_jobs, mock_client + ): + sagemaker = SageMakerProcessingOperator(**self.processing_config_kwargs, + config=create_processing_params) + sagemaker.action_if_job_exists = "increment" + sagemaker.execute(None) + + expected_config = create_processing_params.copy() + # Expect to see ProcessingJobName suffixed with "-2" because we return one existing job + expected_config["ProcessingJobName"] = f"{job_name}-2" + mock_create_processing_job.assert_called_once_with( + expected_config, + wait_for_completion=False, + check_interval=5, + max_ingestion_time=None, + ) + + @mock.patch.object(SageMakerHook, "get_conn") + @mock.patch.object(SageMakerHook, "list_processing_jobs", + return_value=[{"ProcessingJobName": job_name}]) + @mock.patch.object(SageMakerHook, "create_processing_job", + return_value={"ResponseMetadata": {"HTTPStatusCode": 200}}) + def test_execute_with_existing_job_fail( + self, mock_create_processing_job, mock_list_processing_jobs, mock_client + ): + sagemaker = SageMakerProcessingOperator(**self.processing_config_kwargs, + config=create_processing_params) + sagemaker.action_if_job_exists = "fail" + self.assertRaises(AirflowException, sagemaker.execute, None) + + @mock.patch.object(SageMakerHook, "get_conn") + def test_action_if_job_exists_validation(self, mock_client): + sagemaker = SageMakerProcessingOperator(**self.processing_config_kwargs, + config=create_processing_params) + self.assertRaises(AirflowException, sagemaker.__init__, + action_if_job_exists="not_fail_or_increment") diff --git a/tests/providers/amazon/aws/sensors/test_athena.py b/tests/providers/amazon/aws/sensors/test_athena.py index e798e56d78641..c28084edc4225 100644 --- a/tests/providers/amazon/aws/sensors/test_athena.py +++ b/tests/providers/amazon/aws/sensors/test_athena.py @@ -31,7 +31,7 @@ def setUp(self): self.sensor = AthenaSensor(task_id='test_athena_sensor', query_execution_id='abc', sleep_time=5, - max_retires=1, + max_retries=1, aws_conn_id='aws_default') 
@mock.patch.object(AWSAthenaHook, 'poll_query_status', side_effect=("SUCCEEDED",)) diff --git a/tests/providers/amazon/aws/sensors/test_s3_keys_unchanged.py b/tests/providers/amazon/aws/sensors/test_s3_keys_unchanged.py new file mode 100644 index 0000000000000..504c0a7446841 --- /dev/null +++ b/tests/providers/amazon/aws/sensors/test_s3_keys_unchanged.py @@ -0,0 +1,97 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from datetime import datetime +from unittest import TestCase, mock + +from freezegun import freeze_time +from parameterized import parameterized + +from airflow.models.dag import DAG, AirflowException +from airflow.providers.amazon.aws.sensors.s3_keys_unchanged import S3KeysUnchangedSensor + +TEST_DAG_ID = 'unit_tests_aws_sensor' +DEFAULT_DATE = datetime(2015, 1, 1) + + +class TestS3KeysUnchangedSensor(TestCase): + + def setUp(self): + args = { + 'owner': 'airflow', + 'start_date': DEFAULT_DATE, + } + dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args) + dag.schedule_interval = '@once' + self.dag = dag + + self.sensor = S3KeysUnchangedSensor( + task_id='sensor_1', + bucket_name='test-bucket', + prefix='test-prefix/path', + inactivity_period=12, + poke_interval=0.1, + min_objects=1, + allow_delete=True, + dag=self.dag + ) + + def test_reschedule_mode_not_allowed(self): + with self.assertRaises(ValueError): + S3KeysUnchangedSensor( + task_id='sensor_2', + bucket_name='test-bucket', + prefix='test-prefix/path', + poke_interval=0.1, + mode='reschedule', + dag=self.dag + ) + + @freeze_time(DEFAULT_DATE, auto_tick_seconds=10) + def test_files_deleted_between_pokes_throw_error(self): + self.sensor.allow_delete = False + self.sensor.is_keys_unchanged({'a', 'b'}) + with self.assertRaises(AirflowException): + self.sensor.is_keys_unchanged({'a'}) + + @parameterized.expand([ + # Test: resetting inactivity period after key change + (({'a'}, {'a', 'b'}, {'a', 'b', 'c'}), (False, False, False), (0, 0, 0)), + # ..and in case an item was deleted with option `allow_delete=True` + (({'a', 'b'}, {'a'}, {'a', 'c'}), (False, False, False), (0, 0, 0)), + # Test: passes after inactivity period was exceeded + (({'a'}, {'a'}, {'a'}), (False, False, True), (0, 10, 20)), + # ..and do not pass if empty key is given + ((set(), set(), set()), (False, False, False), (0, 10, 20)) + ]) + @freeze_time(DEFAULT_DATE, auto_tick_seconds=10) + def test_key_changes(self, 
current_objects, expected_returns, inactivity_periods): + self.assertEqual(self.sensor.is_keys_unchanged(current_objects[0]), expected_returns[0]) + self.assertEqual(self.sensor.inactivity_seconds, inactivity_periods[0]) + self.assertEqual(self.sensor.is_keys_unchanged(current_objects[1]), expected_returns[1]) + self.assertEqual(self.sensor.inactivity_seconds, inactivity_periods[1]) + self.assertEqual(self.sensor.is_keys_unchanged(current_objects[2]), expected_returns[2]) + self.assertEqual(self.sensor.inactivity_seconds, inactivity_periods[2]) + + @freeze_time(DEFAULT_DATE, auto_tick_seconds=10) + @mock.patch('airflow.providers.amazon.aws.sensors.s3_keys_unchanged.S3Hook') + def test_poke_succeeds_on_upload_complete(self, mock_hook): + mock_hook.return_value.list_keys.return_value = {'a'} + self.assertFalse(self.sensor.poke(dict())) + self.assertFalse(self.sensor.poke(dict())) + self.assertTrue(self.sensor.poke(dict())) diff --git a/tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py b/tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py index 15b62a79691fa..917d51c87722d 100644 --- a/tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py +++ b/tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py @@ -131,3 +131,16 @@ def test_pod_delete_even_on_launcher_error( context = self.create_context(k) k.execute(context=context) assert delete_pod_mock.called + + def test_jinja_templated_fields(self): + task = KubernetesPodOperator( + namespace='default', + image="{{ image_jinja }}:16.04", + cmds=["bash", "-cx"], + name="test_pod", + task_id="task", + ) + + self.assertEqual(task.image, "{{ image_jinja }}:16.04") + task.render_template_fields(context={"image_jinja": "ubuntu"}) + self.assertEqual(task.image, "ubuntu:16.04") diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/tests/providers/google/cloud/hooks/test_dataproc.py index 282722a08011a..08f9d608d0f9a 100644 --- 
a/tests/providers/google/cloud/hooks/test_dataproc.py +++ b/tests/providers/google/cloud/hooks/test_dataproc.py @@ -148,6 +148,7 @@ def test_diagnose_cluster(self, mock_client): retry=None, timeout=None, ) + mock_client.return_value.diagnose_cluster.return_value.result.assert_called_once_with() @mock.patch(DATAPROC_STRING.format("DataprocHook.get_cluster_client")) def test_get_cluster(self, mock_client): diff --git a/tests/providers/google/cloud/hooks/test_datastore.py b/tests/providers/google/cloud/hooks/test_datastore.py index 93dd663bf482f..51591a848533c 100644 --- a/tests/providers/google/cloud/hooks/test_datastore.py +++ b/tests/providers/google/cloud/hooks/test_datastore.py @@ -86,12 +86,17 @@ def test_allocate_ids_no_project_id(self, mock_get_conn, mock_project_id): def test_begin_transaction(self, mock_get_conn): self.datastore_hook.connection = mock_get_conn.return_value - transaction = self.datastore_hook.begin_transaction(project_id=GCP_PROJECT_ID) + transaction = self.datastore_hook.begin_transaction( + project_id=GCP_PROJECT_ID, + transaction_options={}, + ) projects = self.datastore_hook.connection.projects projects.assert_called_once_with() begin_transaction = projects.return_value.beginTransaction - begin_transaction.assert_called_once_with(projectId=GCP_PROJECT_ID, body={}) + begin_transaction.assert_called_once_with( + projectId=GCP_PROJECT_ID, body={'transactionOptions': {}} + ) execute = begin_transaction.return_value.execute execute.assert_called_once_with(num_retries=mock.ANY) self.assertEqual(transaction, execute.return_value['transaction']) diff --git a/tests/providers/google/cloud/hooks/test_pubsub.py b/tests/providers/google/cloud/hooks/test_pubsub.py index f2087818dae72..dc637fb40459c 100644 --- a/tests/providers/google/cloud/hooks/test_pubsub.py +++ b/tests/providers/google/cloud/hooks/test_pubsub.py @@ -190,6 +190,11 @@ def test_create_nonexistent_subscription(self, mock_service): retain_acked_messages=None, 
message_retention_duration=None, labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter_=None, + dead_letter_policy=None, + retry_policy=None, retry=None, timeout=None, metadata=None, @@ -216,6 +221,11 @@ def test_create_subscription_different_project_topic(self, mock_service): retain_acked_messages=None, message_retention_duration=None, labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter_=None, + dead_letter_policy=None, + retry_policy=None, retry=None, timeout=None, metadata=None, @@ -271,6 +281,11 @@ def test_create_subscription_without_subscription_name(self, mock_uuid, retain_acked_messages=None, message_retention_duration=None, labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter_=None, + dead_letter_policy=None, + retry_policy=None, retry=None, timeout=None, metadata=None, @@ -292,6 +307,40 @@ def test_create_subscription_with_ack_deadline(self, mock_service): retain_acked_messages=None, message_retention_duration=None, labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter_=None, + dead_letter_policy=None, + retry_policy=None, + retry=None, + timeout=None, + metadata=None, + ) + self.assertEqual(TEST_SUBSCRIPTION, response) + + @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client')) + def test_create_subscription_with_filter(self, mock_service): + create_method = mock_service.create_subscription + + response = self.pubsub_hook.create_subscription( + project_id=TEST_PROJECT, + topic=TEST_TOPIC, + subscription=TEST_SUBSCRIPTION, + filter_='attributes.domain="com"' + ) + create_method.assert_called_once_with( + name=EXPANDED_SUBSCRIPTION, + topic=EXPANDED_TOPIC, + push_config=None, + ack_deadline_seconds=10, + retain_acked_messages=None, + message_retention_duration=None, + labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter_='attributes.domain="com"', + dead_letter_policy=None, + retry_policy=None, 
retry=None, timeout=None, metadata=None, diff --git a/tests/providers/google/cloud/operators/test_dataproc.py b/tests/providers/google/cloud/operators/test_dataproc.py index dbf015b2a3382..bf1a38c7e8aa6 100644 --- a/tests/providers/google/cloud/operators/test_dataproc.py +++ b/tests/providers/google/cloud/operators/test_dataproc.py @@ -21,9 +21,10 @@ from typing import Any from unittest import mock -from google.api_core.exceptions import AlreadyExists +from google.api_core.exceptions import AlreadyExists, NotFound from google.api_core.retry import Retry +from airflow import AirflowException from airflow.providers.google.cloud.operators.dataproc import ( ClusterGenerator, DataprocCreateClusterOperator, DataprocDeleteClusterOperator, DataprocInstantiateInlineWorkflowTemplateOperator, DataprocInstantiateWorkflowTemplateOperator, @@ -225,6 +226,7 @@ def test_execute(self, mock_hook): @mock.patch(DATAPROC_PATH.format("DataprocHook")) def test_execute_if_cluster_exists(self, mock_hook): mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")] + mock_hook.return_value.get_cluster.return_value.status.state = 0 op = DataprocCreateClusterOperator( task_id=TASK_ID, region=GCP_LOCATION, @@ -256,6 +258,97 @@ def test_execute_if_cluster_exists(self, mock_hook): metadata=METADATA, ) + @mock.patch(DATAPROC_PATH.format("DataprocHook")) + def test_execute_if_cluster_exists_do_not_use(self, mock_hook): + mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")] + mock_hook.return_value.get_cluster.return_value.status.state = 0 + op = DataprocCreateClusterOperator( + task_id=TASK_ID, + region=GCP_LOCATION, + project_id=GCP_PROJECT, + cluster=CLUSTER, + gcp_conn_id=GCP_CONN_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + request_id=REQUEST_ID, + use_if_exists=False + ) + with self.assertRaises(AlreadyExists): + op.execute(context={}) + + @mock.patch(DATAPROC_PATH.format("DataprocHook")) + def 
test_execute_if_cluster_exists_in_error_state(self, mock_hook): + mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")] + cluster_status = mock_hook.return_value.get_cluster.return_value.status + cluster_status.state = 0 + cluster_status.ERROR = 0 + + op = DataprocCreateClusterOperator( + task_id=TASK_ID, + region=GCP_LOCATION, + project_id=GCP_PROJECT, + cluster=CLUSTER, + delete_on_error=True, + gcp_conn_id=GCP_CONN_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + request_id=REQUEST_ID, + ) + with self.assertRaises(AirflowException): + op.execute(context={}) + + mock_hook.return_value.diagnose_cluster.assert_called_once_with( + region=GCP_LOCATION, + project_id=GCP_PROJECT, + cluster_name=CLUSTER_NAME, + ) + mock_hook.return_value.delete_cluster.assert_called_once_with( + region=GCP_LOCATION, + project_id=GCP_PROJECT, + cluster_name=CLUSTER_NAME, + ) + + @mock.patch(DATAPROC_PATH.format("exponential_sleep_generator")) + @mock.patch(DATAPROC_PATH.format("DataprocCreateClusterOperator._create_cluster")) + @mock.patch(DATAPROC_PATH.format("DataprocCreateClusterOperator._get_cluster")) + @mock.patch(DATAPROC_PATH.format("DataprocHook")) + def test_execute_if_cluster_exists_in_deleting_state( + self, mock_hook, mock_get_cluster, mock_create_cluster, mock_generator + ): + cluster = mock.MagicMock() + cluster.status.state = 0 + cluster.status.DELETING = 0 + + cluster2 = mock.MagicMock() + cluster2.status.state = 0 + cluster2.status.ERROR = 0 + + mock_create_cluster.side_effect = [AlreadyExists("test"), cluster2] + mock_generator.return_value = [0] + mock_get_cluster.side_effect = [cluster, NotFound("test")] + + op = DataprocCreateClusterOperator( + task_id=TASK_ID, + region=GCP_LOCATION, + project_id=GCP_PROJECT, + cluster=CLUSTER, + delete_on_error=True, + gcp_conn_id=GCP_CONN_ID, + ) + with self.assertRaises(AirflowException): + op.execute(context={}) + + calls = [mock.call(mock_hook.return_value), mock.call(mock_hook.return_value)] + 
mock_get_cluster.assert_has_calls(calls) + mock_create_cluster.assert_has_calls(calls) + mock_hook.return_value.diagnose_cluster.assert_called_once_with( + region=GCP_LOCATION, + project_id=GCP_PROJECT, + cluster_name=CLUSTER_NAME, + ) + class TestDataprocClusterScaleOperator(unittest.TestCase): def test_deprecation_warning(self): diff --git a/tests/providers/google/cloud/operators/test_datastore.py b/tests/providers/google/cloud/operators/test_datastore.py new file mode 100644 index 0000000000000..c097e58a65b2c --- /dev/null +++ b/tests/providers/google/cloud/operators/test_datastore.py @@ -0,0 +1,206 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from unittest import mock + +from airflow.providers.google.cloud.operators.datastore import ( + CloudDatastoreAllocateIdsOperator, CloudDatastoreBeginTransactionOperator, CloudDatastoreCommitOperator, + CloudDatastoreDeleteOperationOperator, CloudDatastoreExportEntitiesOperator, + CloudDatastoreGetOperationOperator, CloudDatastoreImportEntitiesOperator, CloudDatastoreRollbackOperator, + CloudDatastoreRunQueryOperator, +) + +HOOK_PATH = "airflow.providers.google.cloud.operators.datastore.DatastoreHook" +PROJECT_ID = "test-project" +CONN_ID = "test-gcp-conn-id" +BODY = {"key", "value"} +TRANSACTION = "transaction-name" +BUCKET = "gs://test-bucket" +FILE = "filename" +OPERATION_ID = "1234" + + +class TestCloudDatastoreExportEntitiesOperator: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + mock_hook.return_value.export_to_storage_bucket.return_value = { + "name": OPERATION_ID + } + mock_hook.return_value.poll_operation_until_done.return_value = { + "metadata": {"common": {"state": "SUCCESSFUL"}} + } + + op = CloudDatastoreExportEntitiesOperator( + task_id="test_task", + datastore_conn_id=CONN_ID, + project_id=PROJECT_ID, + bucket=BUCKET, + ) + op.execute({}) + + mock_hook.assert_called_once_with(CONN_ID, None) + mock_hook.return_value.export_to_storage_bucket.assert_called_once_with( + project_id=PROJECT_ID, + bucket=BUCKET, + entity_filter=None, + labels=None, + namespace=None, + ) + + mock_hook.return_value.poll_operation_until_done.assert_called_once_with( + OPERATION_ID, 10 + ) + + +class TestCloudDatastoreImportEntitiesOperator: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + mock_hook.return_value.import_from_storage_bucket.return_value = { + "name": OPERATION_ID + } + mock_hook.return_value.poll_operation_until_done.return_value = { + "metadata": {"common": {"state": "SUCCESSFUL"}} + } + + op = CloudDatastoreImportEntitiesOperator( + task_id="test_task", + datastore_conn_id=CONN_ID, + project_id=PROJECT_ID, + bucket=BUCKET, + 
+ file=FILE, + ) + op.execute({}) + + mock_hook.assert_called_once_with(CONN_ID, None) + mock_hook.return_value.import_from_storage_bucket.assert_called_once_with( + project_id=PROJECT_ID, + bucket=BUCKET, + file=FILE, + entity_filter=None, + labels=None, + namespace=None, + ) + + mock_hook.return_value.poll_operation_until_done.assert_called_once_with( + OPERATION_ID, 10 + ) + + +class TestCloudDatastoreAllocateIds: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + partial_keys = [1, 2, 3] + op = CloudDatastoreAllocateIdsOperator( + task_id="test_task", + gcp_conn_id=CONN_ID, + project_id=PROJECT_ID, + partial_keys=partial_keys, + ) + op.execute({}) + + mock_hook.assert_called_once_with(gcp_conn_id=CONN_ID) + mock_hook.return_value.allocate_ids.assert_called_once_with( + project_id=PROJECT_ID, partial_keys=partial_keys + ) + + +class TestCloudDatastoreBeginTransaction: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + op = CloudDatastoreBeginTransactionOperator( + task_id="test_task", + gcp_conn_id=CONN_ID, + project_id=PROJECT_ID, + transaction_options=BODY, + ) + op.execute({}) + + mock_hook.assert_called_once_with(gcp_conn_id=CONN_ID) + mock_hook.return_value.begin_transaction.assert_called_once_with( + project_id=PROJECT_ID, transaction_options=BODY + ) + + +class TestCloudDatastoreCommit: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + op = CloudDatastoreCommitOperator( + task_id="test_task", gcp_conn_id=CONN_ID, project_id=PROJECT_ID, body=BODY + ) + op.execute({}) + + mock_hook.assert_called_once_with(gcp_conn_id=CONN_ID) + mock_hook.return_value.commit.assert_called_once_with( + project_id=PROJECT_ID, body=BODY + ) + + +class TestCloudDatastoreDeleteOperation: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + op = CloudDatastoreDeleteOperationOperator( + task_id="test_task", gcp_conn_id=CONN_ID, name=TRANSACTION + ) + op.execute({}) + + mock_hook.assert_called_once_with(gcp_conn_id=CONN_ID) + 
mock_hook.return_value.delete_operation.assert_called_once_with( + name=TRANSACTION + ) + + +class TestCloudDatastoreGetOperation: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + op = CloudDatastoreGetOperationOperator( + task_id="test_task", gcp_conn_id=CONN_ID, name=TRANSACTION + ) + op.execute({}) + + mock_hook.assert_called_once_with(gcp_conn_id=CONN_ID) + mock_hook.return_value.get_operation.assert_called_once_with(name=TRANSACTION) + + +class TestCloudDatastoreRollback: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + op = CloudDatastoreRollbackOperator( + task_id="test_task", + gcp_conn_id=CONN_ID, + project_id=PROJECT_ID, + transaction=TRANSACTION, + ) + op.execute({}) + + mock_hook.assert_called_once_with(gcp_conn_id=CONN_ID) + mock_hook.return_value.rollback.assert_called_once_with( + project_id=PROJECT_ID, transaction=TRANSACTION + ) + + +class TestCloudDatastoreRunQuery: + @mock.patch(HOOK_PATH) + def test_execute(self, mock_hook): + op = CloudDatastoreRunQueryOperator( + task_id="test_task", gcp_conn_id=CONN_ID, project_id=PROJECT_ID, body=BODY + ) + op.execute({}) + + mock_hook.assert_called_once_with(gcp_conn_id=CONN_ID) + mock_hook.return_value.run_query.assert_called_once_with( + project_id=PROJECT_ID, body=BODY + ) diff --git a/tests/providers/google/cloud/operators/test_datastore_system.py b/tests/providers/google/cloud/operators/test_datastore_system.py index 961ba88796b5d..7cbeaf1c0f367 100644 --- a/tests/providers/google/cloud/operators/test_datastore_system.py +++ b/tests/providers/google/cloud/operators/test_datastore_system.py @@ -42,3 +42,7 @@ def tearDown(self): @provide_gcp_context(GCP_DATASTORE_KEY) def test_run_example_dag(self): self.run_dag('example_gcp_datastore', CLOUD_DAG_FOLDER) + + @provide_gcp_context(GCP_DATASTORE_KEY) + def test_run_example_dag_operations(self): + self.run_dag('example_gcp_datastore_operations', CLOUD_DAG_FOLDER) diff --git 
a/tests/providers/google/cloud/operators/test_kubernetes_engine.py b/tests/providers/google/cloud/operators/test_kubernetes_engine.py index 52d5ee5431432..b0cdb48eec282 100644 --- a/tests/providers/google/cloud/operators/test_kubernetes_engine.py +++ b/tests/providers/google/cloud/operators/test_kubernetes_engine.py @@ -83,7 +83,7 @@ def test_create_execute_error_body(self, body, mock_hook): body=body, task_id=PROJECT_TASK_ID) - # pylint: disable=no-value-for-parameter + # pylint: disable=missing-kwoa @mock.patch('airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook') def test_create_execute_error_project_id(self, mock_hook): with self.assertRaises(AirflowException): @@ -118,7 +118,7 @@ def test_delete_execute_error_project_id(self, mock_hook): name=CLUSTER_NAME, task_id=PROJECT_TASK_ID) - # pylint: disable=no-value-for-parameter + # pylint: disable=missing-kwoa @mock.patch('airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook') def test_delete_execute_error_cluster_name(self, mock_hook): with self.assertRaises(AirflowException): @@ -126,7 +126,7 @@ def test_delete_execute_error_cluster_name(self, mock_hook): location=PROJECT_LOCATION, task_id=PROJECT_TASK_ID) - # pylint: disable=no-value-for-parameter + # pylint: disable=missing-kwoa @mock.patch('airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook') def test_delete_execute_error_location(self, mock_hook): with self.assertRaises(AirflowException): diff --git a/tests/providers/google/cloud/operators/test_pubsub.py b/tests/providers/google/cloud/operators/test_pubsub.py index e9265da48665e..f3d2eaa776653 100644 --- a/tests/providers/google/cloud/operators/test_pubsub.py +++ b/tests/providers/google/cloud/operators/test_pubsub.py @@ -129,6 +129,11 @@ def test_execute(self, mock_hook): retain_acked_messages=None, message_retention_duration=None, labels=None, + enable_message_ordering=False, + expiration_policy=None, + filter_=None, + dead_letter_policy=None, + 
retry_policy=None, retry=None, timeout=None, metadata=None, @@ -158,6 +163,11 @@ def test_execute_different_project_ids(self, mock_hook): retain_acked_messages=None, message_retention_duration=None, labels=None, + enable_message_ordering=False, + expiration_policy=None, + filter_=None, + dead_letter_policy=None, + retry_policy=None, retry=None, timeout=None, metadata=None @@ -184,6 +194,11 @@ def test_execute_no_subscription(self, mock_hook): retain_acked_messages=None, message_retention_duration=None, labels=None, + enable_message_ordering=False, + expiration_policy=None, + filter_=None, + dead_letter_policy=None, + retry_policy=None, retry=None, timeout=None, metadata=None, diff --git a/tests/providers/google/cloud/operators/test_speech_to_text.py b/tests/providers/google/cloud/operators/test_speech_to_text.py index 867121a46591e..61543a71057a4 100644 --- a/tests/providers/google/cloud/operators/test_speech_to_text.py +++ b/tests/providers/google/cloud/operators/test_speech_to_text.py @@ -48,7 +48,7 @@ def test_missing_config(self, mock_hook): mock_hook.return_value.recognize_speech.return_value = True with self.assertRaises(AirflowException) as e: - CloudSpeechToTextRecognizeSpeechOperator( # pylint: disable=no-value-for-parameter + CloudSpeechToTextRecognizeSpeechOperator( # pylint: disable=missing-kwoa project_id=PROJECT_ID, gcp_conn_id=GCP_CONN_ID, audio=AUDIO, task_id="id" ).execute(context={"task_instance": Mock()}) @@ -61,7 +61,7 @@ def test_missing_audio(self, mock_hook): mock_hook.return_value.recognize_speech.return_value = True with self.assertRaises(AirflowException) as e: - CloudSpeechToTextRecognizeSpeechOperator( # pylint: disable=no-value-for-parameter + CloudSpeechToTextRecognizeSpeechOperator( # pylint: disable=missing-kwoa project_id=PROJECT_ID, gcp_conn_id=GCP_CONN_ID, config=CONFIG, task_id="id" ).execute(context={"task_instance": Mock()}) diff --git a/tests/providers/google/cloud/sensors/test_gcs.py 
b/tests/providers/google/cloud/sensors/test_gcs.py index e353238d8ebe4..0e39c7b0346e0 100644 --- a/tests/providers/google/cloud/sensors/test_gcs.py +++ b/tests/providers/google/cloud/sensors/test_gcs.py @@ -212,11 +212,22 @@ def setUp(self): poke_interval=10, min_objects=1, allow_delete=False, + google_cloud_conn_id=TEST_GCP_CONN_ID, + delegate_to=TEST_DELEGATE_TO, dag=self.dag ) self.last_mocked_date = datetime(2019, 4, 24, 0, 0, 0) + @mock.patch("airflow.providers.google.cloud.sensors.gcs.GCSHook") + def test_get_gcs_hook(self, mock_hook): + self.sensor._get_gcs_hook() + mock_hook.assert_called_once_with( + gcp_conn_id=TEST_GCP_CONN_ID, + delegate_to=TEST_DELEGATE_TO, + ) + self.assertEqual(mock_hook.return_value, self.sensor.hook) + @mock.patch('airflow.providers.google.cloud.sensors.gcs.get_time', mock_time) def test_files_deleted_between_pokes_throw_error(self): self.sensor.is_bucket_updated({'a', 'b'}) diff --git a/tests/providers/google/cloud/utils/test_field_sanitizer.py b/tests/providers/google/cloud/utils/test_field_sanitizer.py new file mode 100644 index 0000000000000..91cac96355dc6 --- /dev/null +++ b/tests/providers/google/cloud/utils/test_field_sanitizer.py @@ -0,0 +1,234 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +import unittest +from copy import deepcopy + +from airflow.providers.google.cloud.utils.field_sanitizer import GcpBodyFieldSanitizer + + +class TestGcpBodyFieldSanitizer(unittest.TestCase): + def test_sanitize_should_sanitize_empty_body_and_fields(self): + body = {} + fields_to_sanitize = [] + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({}, body) + + def test_sanitize_should_not_fail_with_none_body(self): + body = None + fields_to_sanitize = [] + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertIsNone(body) + + def test_sanitize_should_fail_with_none_fields(self): + body = {} + fields_to_sanitize = None + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + + with self.assertRaises(TypeError): + sanitizer.sanitize(body) + + def test_sanitize_should_not_fail_if_field_is_absent_in_body(self): + body = {} + fields_to_sanitize = ["kind"] + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({}, body) + + def test_sanitize_should_not_remove_fields_for_incorrect_specification(self): + actual_body = [ + {"kind": "compute#instanceTemplate", "name": "instance"}, + {"kind": "compute#instanceTemplate1", "name": "instance1"}, + {"kind": "compute#instanceTemplate2", "name": "instance2"}, + ] + body = deepcopy(actual_body) + fields_to_sanitize = ["kind"] + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual(actual_body, body) + + def test_sanitize_should_remove_all_fields_from_root_level(self): + body = {"kind": "compute#instanceTemplate", "name": "instance"} + fields_to_sanitize = ["kind"] + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({"name": "instance"}, body) + + def 
test_sanitize_should_remove_for_multiple_fields_from_root_level(self): + body = {"kind": "compute#instanceTemplate", "name": "instance"} + fields_to_sanitize = ["kind", "name"] + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({}, body) + + def test_sanitize_should_remove_all_fields_in_a_list_value(self): + body = {"fields": [ + {"kind": "compute#instanceTemplate", "name": "instance"}, + {"kind": "compute#instanceTemplate1", "name": "instance1"}, + {"kind": "compute#instanceTemplate2", "name": "instance2"}, + ]} + fields_to_sanitize = ["fields.kind"] + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({"fields": [ + {"name": "instance"}, + {"name": "instance1"}, + {"name": "instance2"}, + ]}, body) + + def test_sanitize_should_remove_all_fields_in_any_nested_body(self): + fields_to_sanitize = [ + "kind", + "properties.disks.kind", + "properties.metadata.kind", + ] + + body = { + "kind": "compute#instanceTemplate", + "name": "instance", + "properties": { + "disks": [ + { + "name": "a", + "kind": "compute#attachedDisk", + "type": "PERSISTENT", + "mode": "READ_WRITE", + }, + { + "name": "b", + "kind": "compute#attachedDisk", + "type": "PERSISTENT", + "mode": "READ_WRITE", + } + ], + "metadata": { + "kind": "compute#metadata", + "fingerprint": "GDPUYxlwHe4=" + }, + } + } + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({ + "name": "instance", + "properties": { + "disks": [ + { + "name": "a", + "type": "PERSISTENT", + "mode": "READ_WRITE" + }, + { + "name": "b", + "type": "PERSISTENT", + "mode": "READ_WRITE" + } + ], + "metadata": { + "fingerprint": "GDPUYxlwHe4=" + } + } + }, body) + + def test_sanitize_should_not_fail_if_specification_has_none_value(self): + fields_to_sanitize = [ + "kind", + "properties.disks.kind", + "properties.metadata.kind", + ] + + body = { + "kind": "compute#instanceTemplate", + 
"name": "instance", + "properties": { + "disks": None + } + } + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({ + "name": "instance", + "properties": { + "disks": None + } + }, body) + + def test_sanitize_should_not_fail_if_no_specification_matches(self): + fields_to_sanitize = [ + "properties.disks.kind1", + "properties.metadata.kind2", + ] + + body = { + "name": "instance", + "properties": { + "disks": None + } + } + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({ + "name": "instance", + "properties": { + "disks": None + } + }, body) + + def test_sanitize_should_not_fail_if_type_in_body_do_not_match_with_specification(self): + fields_to_sanitize = [ + "properties.disks.kind", + "properties.metadata.kind2", + ] + + body = { + "name": "instance", + "properties": { + "disks": 1 + } + } + + sanitizer = GcpBodyFieldSanitizer(fields_to_sanitize) + sanitizer.sanitize(body) + + self.assertEqual({ + "name": "instance", + "properties": { + "disks": 1 + } + }, body) diff --git a/tests/providers/google/cloud/utils/test_field_validator.py b/tests/providers/google/cloud/utils/test_field_validator.py new file mode 100644 index 0000000000000..c57ae7df43e98 --- /dev/null +++ b/tests/providers/google/cloud/utils/test_field_validator.py @@ -0,0 +1,293 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import unittest + +from airflow.providers.google.cloud.utils.field_validator import ( + GcpBodyFieldValidator, GcpFieldValidationException, GcpValidationSpecificationException, +) + + +class TestGcpBodyFieldValidator(unittest.TestCase): + def test_validate_should_not_raise_exception_if_field_and_body_are_both_empty(self): + specification = [] + body = {} + + validator = GcpBodyFieldValidator(specification, 'v1') + + self.assertIsNone(validator.validate(body)) + + def test_validate_should_fail_if_body_is_none(self): + specification = [] + body = None + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(AttributeError): + validator.validate(body) + + def test_validate_should_fail_if_specification_is_none(self): + specification = None + body = {} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(TypeError): + validator.validate(body) + + def test_validate_should_raise_exception_name_attribute_is_missing_from_specs(self): + specification = [dict(allow_empty=False)] + body = {} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(KeyError): + validator.validate(body) + + def test_validate_should_raise_exception_if_field_is_not_present(self): + specification = [dict(name="name", allow_empty=False)] + body = {} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(GcpFieldValidationException): + validator.validate(body) + + def test_validate_should_validate_a_single_field(self): + specification = [dict(name="name", 
allow_empty=False)] + body = {"name": "bigquery"} + + validator = GcpBodyFieldValidator(specification, 'v1') + + self.assertIsNone(validator.validate(body)) + + def test_validate_should_fail_if_body_is_not_a_dict(self): + specification = [dict(name="name", allow_empty=False)] + body = [{"name": "bigquery"}] + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(AttributeError): + validator.validate(body) + + def test_validate_should_fail_for_set_allow_empty_when_field_is_none(self): + specification = [dict(name="name", allow_empty=True)] + body = {"name": None} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(GcpFieldValidationException): + validator.validate(body) + + def test_validate_should_interpret_allow_empty_clause(self): + specification = [dict(name="name", allow_empty=True)] + body = {"name": ""} + + validator = GcpBodyFieldValidator(specification, 'v1') + + self.assertIsNone(validator.validate(body)) + + def test_validate_should_raise_if_empty_clause_is_false(self): + specification = [dict(name="name", allow_empty=False)] + body = {"name": None} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(GcpFieldValidationException): + validator.validate(body) + + def test_validate_should_raise_if_version_mismatch_is_found(self): + specification = [dict(name="name", allow_empty=False, api_version='v2')] + body = {"name": "value"} + + validator = GcpBodyFieldValidator(specification, 'v1') + + validator.validate(body) + + def test_validate_should_interpret_optional_irrespective_of_allow_empty(self): + specification = [dict(name="name", allow_empty=False, optional=True)] + body = {"name": None} + + validator = GcpBodyFieldValidator(specification, 'v1') + + self.assertIsNone(validator.validate(body)) + + def test_validate_should_interpret_optional_clause(self): + specification = [dict(name="name", allow_empty=False, optional=True)] + body = {} + + validator = 
GcpBodyFieldValidator(specification, 'v1') + + self.assertIsNone(validator.validate(body)) + + def test_validate_should_raise_exception_if_optional_clause_is_false_and_field_not_present(self): + specification = [dict(name="name", allow_empty=False, optional=False)] + body = {} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(GcpFieldValidationException): + validator.validate(body) + + def test_validate_should_interpret_dict_type(self): + specification = [dict(name="labels", optional=True, type="dict")] + body = {"labels": {"one": "value"}} + + validator = GcpBodyFieldValidator(specification, 'v1') + + self.assertIsNone(validator.validate(body)) + + def test_validate_should_fail_if_value_is_not_dict_as_per_specs(self): + specification = [dict(name="labels", optional=True, type="dict")] + body = {"labels": 1} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(GcpFieldValidationException): + validator.validate(body) + + def test_validate_should_not_allow_both_type_and_allow_empty_in_a_spec(self): + specification = [dict(name="labels", optional=True, type="dict", allow_empty=True)] + body = {"labels": 1} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(GcpValidationSpecificationException): + validator.validate(body) + + def test_validate_should_allow_type_and_optional_in_a_spec(self): + specification = [dict(name="labels", optional=True, type="dict")] + body = {"labels": {}} + + validator = GcpBodyFieldValidator(specification, 'v1') + + self.assertIsNone(validator.validate(body)) + + def test_validate_should_fail_if_union_field_is_not_found(self): + specification = [ + dict(name="an_union", type="union", optional=False, fields=[ + dict(name="variant_1", regexp=r'^.+$', optional=False, allow_empty=False), + ]) + ] + body = {} + + validator = GcpBodyFieldValidator(specification, 'v1') + self.assertIsNone(validator.validate(body)) + + def 
test_validate_should_fail_if_there_is_no_nested_field_for_union(self): + specification = [ + dict(name="an_union", type="union", optional=False, fields=[]) + ] + body = {} + + validator = GcpBodyFieldValidator(specification, 'v1') + + with self.assertRaises(GcpValidationSpecificationException): + validator.validate(body) + + def test_validate_should_interpret_union_with_one_field(self): + specification = [ + dict(name="an_union", type="union", fields=[ + dict(name="variant_1", regexp=r'^.+$'), + ]) + ] + body = {"variant_1": "abc", "variant_2": "def"} + + validator = GcpBodyFieldValidator(specification, 'v1') + self.assertIsNone(validator.validate(body)) + + def test_validate_should_fail_if_both_field_of_union_is_present(self): + specification = [ + dict(name="an_union", type="union", fields=[ + dict(name="variant_1", regexp=r'^.+$'), + dict(name="variant_2", regexp=r'^.+$'), + ]) + ] + body = {"variant_1": "abc", "variant_2": "def"} + + validator = GcpBodyFieldValidator(specification, 'v1') + with self.assertRaises(GcpFieldValidationException): + validator.validate(body) + + def test_validate_should_validate_when_value_matches_regex(self): + specification = [ + dict(name="an_union", type="union", fields=[ + dict(name="variant_1", regexp=r'[^a-z]'), + ]) + ] + body = {"variant_1": "12"} + + validator = GcpBodyFieldValidator(specification, 'v1') + self.assertIsNone(validator.validate(body)) + + def test_validate_should_fail_when_value_does_not_match_regex(self): + specification = [ + dict(name="an_union", type="union", fields=[ + dict(name="variant_1", regexp=r'[^a-z]'), + ]) + ] + body = {"variant_1": "abc"} + + validator = GcpBodyFieldValidator(specification, 'v1') + with self.assertRaises(GcpFieldValidationException): + validator.validate(body) + + def test_validate_should_raise_if_custom_validation_is_not_true(self): + def _int_equal_to_zero(value): + if int(value) != 0: + raise GcpFieldValidationException("The available memory has to be equal to 0") + + 
specification = [ + dict(name="availableMemoryMb", custom_validation=_int_equal_to_zero) + ] + body = {"availableMemoryMb": 1} + + validator = GcpBodyFieldValidator(specification, 'v1') + with self.assertRaises(GcpFieldValidationException): + validator.validate(body) + + def test_validate_should_not_raise_if_custom_validation_is_true(self): + def _int_equal_to_zero(value): + if int(value) != 0: + raise GcpFieldValidationException("The available memory has to be equal to 0") + + specification = [ + dict(name="availableMemoryMb", custom_validation=_int_equal_to_zero) + ] + body = {"availableMemoryMb": 0} + + validator = GcpBodyFieldValidator(specification, 'v1') + self.assertIsNone(validator.validate(body)) + + def test_validate_should_validate_group_of_specs(self): + specification = [ + dict(name="name", allow_empty=False), + dict(name="description", allow_empty=False, optional=True), + dict(name="labels", optional=True, type="dict"), + dict(name="an_union", type="union", fields=[ + dict(name="variant_1", regexp=r'^.+$'), + dict(name="variant_2", regexp=r'^.+$', api_version='v1beta2'), + dict(name="variant_3", type="dict", fields=[ + dict(name="url", regexp=r'^.+$') + ]), + dict(name="variant_4") + ]), + ] + body = {"variant_1": "abc", "name": "bigquery"} + + validator = GcpBodyFieldValidator(specification, 'v1') + validator.validate(body) diff --git a/tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py b/tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py new file mode 100644 index 0000000000000..ae9e0e74fb7cf --- /dev/null +++ b/tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py @@ -0,0 +1,95 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import base64 +import binascii +import unittest + +import dill +import mock + +try: + from airflow.providers.google.cloud.utils import mlengine_prediction_summary +except ImportError as e: + if 'apache_beam' in str(e): + raise unittest.SkipTest(f"package apache_beam not present. Skipping all tests in {__name__}") + + +class TestJsonCode(unittest.TestCase): + def test_encode(self): + self.assertEqual(b'{"a": 1}', mlengine_prediction_summary.JsonCoder.encode({'a': 1})) + + def test_decode(self): + self.assertEqual({'a': 1}, mlengine_prediction_summary.JsonCoder.decode('{"a": 1}')) + + +class TestMakeSummary(unittest.TestCase): + def test_make_summary(self): + print(mlengine_prediction_summary.MakeSummary(1, lambda x: x, [])) + + def test_run_without_all_arguments_should_raise_exception(self): + with self.assertRaises(SystemExit): + mlengine_prediction_summary.run() + + with self.assertRaises(SystemExit): + mlengine_prediction_summary.run([ + "--prediction_path=some/path", + ]) + + with self.assertRaises(SystemExit): + mlengine_prediction_summary.run([ + "--prediction_path=some/path", + "--metric_fn_encoded=encoded_text", + ]) + + def test_run_should_fail_for_invalid_encoded_fn(self): + with self.assertRaises(binascii.Error): + mlengine_prediction_summary.run([ + "--prediction_path=some/path", + "--metric_fn_encoded=invalid_encoded_text", + "--metric_keys=a", + ]) + + def 
test_run_should_fail_if_enc_fn_is_not_callable(self): + non_callable_value = 1 + fn_enc = base64.b64encode(dill.dumps(non_callable_value)).decode('utf-8') + + with self.assertRaises(ValueError): + mlengine_prediction_summary.run([ + "--prediction_path=some/path", + "--metric_fn_encoded=" + fn_enc, + "--metric_keys=a", + ]) + + @mock.patch.object(mlengine_prediction_summary.beam.pipeline, "PipelineOptions") + @mock.patch.object(mlengine_prediction_summary.beam, "Pipeline") + @mock.patch.object(mlengine_prediction_summary.beam.io, "ReadFromText") + def test_run_should_not_fail_with_valid_fn(self, io_mock, pipeline_obj_mock, pipeline_mock): + def metric_function(): + return 1 + + fn_enc = base64.b64encode(dill.dumps(metric_function)).decode('utf-8') + + mlengine_prediction_summary.run([ + "--prediction_path=some/path", + "--metric_fn_encoded=" + fn_enc, + "--metric_keys=a", + ]) + + pipeline_mock.assert_called_once_with([]) + pipeline_obj_mock.assert_called_once() + io_mock.assert_called_once() diff --git a/tests/providers/google/marketing_platform/hooks/test_analytics.py b/tests/providers/google/marketing_platform/hooks/test_analytics.py index 5fc518823a0d5..c073e2916c881 100644 --- a/tests/providers/google/marketing_platform/hooks/test_analytics.py +++ b/tests/providers/google/marketing_platform/hooks/test_analytics.py @@ -26,7 +26,9 @@ ACCOUNT_ID = "the_knight_who_says_ni!" 
DATA_SOURCE = "Monthy Python" API_VERSION = "v3" -GCP_CONN_ID = "google_cloud_default" +GCP_CONN_ID = "test_gcp_conn_id" +DELEGATE_TO = "TEST_DELEGATE_TO" +IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"] class TestGoogleAnalyticsHook(unittest.TestCase): @@ -37,6 +39,21 @@ def setUp(self): ): self.hook = GoogleAnalyticsHook(API_VERSION, GCP_CONN_ID) + @mock.patch("airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__") + def test_init(self, mock_base_init): + hook = GoogleAnalyticsHook( + API_VERSION, + GCP_CONN_ID, + delegate_to=DELEGATE_TO, + impersonation_chain=IMPERSONATION_CHAIN, + ) + mock_base_init.assert_called_once_with( + GCP_CONN_ID, + delegate_to=DELEGATE_TO, + impersonation_chain=IMPERSONATION_CHAIN, + ) + self.assertEqual(hook.api_version, API_VERSION) + @mock.patch( "airflow.providers.google.marketing_platform.hooks." "analytics.GoogleAnalyticsHook._authorize" diff --git a/tests/providers/google/marketing_platform/operators/test_display_video_system.py b/tests/providers/google/marketing_platform/operators/test_display_video_system.py index 7d4358db2d59c..64ee8d8b15883 100644 --- a/tests/providers/google/marketing_platform/operators/test_display_video_system.py +++ b/tests/providers/google/marketing_platform/operators/test_display_video_system.py @@ -47,3 +47,11 @@ def tearDown(self): @provide_gcp_context(GMP_KEY, scopes=SCOPES) def test_run_example_dag(self): self.run_dag('example_display_video', MARKETING_DAG_FOLDER) + + @provide_gcp_context(GMP_KEY, scopes=SCOPES) + def test_run_example_dag_misc(self): + self.run_dag('example_display_video_misc', MARKETING_DAG_FOLDER) + + @provide_gcp_context(GMP_KEY, scopes=SCOPES) + def test_run_example_dag_sdf(self): + self.run_dag('example_display_video_sdf', MARKETING_DAG_FOLDER) diff --git a/tests/providers/jira/sensors/test_jira.py b/tests/providers/jira/sensors/test_jira.py index 59a3c0c6306d0..782eabbd875d5 100644 --- a/tests/providers/jira/sensors/test_jira.py +++ 
b/tests/providers/jira/sensors/test_jira.py @@ -64,6 +64,7 @@ def test_issue_label_set(self, jira_mock): jira_mock.return_value.issue.return_value = minimal_test_ticket ticket_label_sensor = JiraTicketSensor( + method_name='issue', task_id='search-ticket-test', ticket_id='TEST-1226', field_checker_func=TestJiraSensor.field_checker_func, diff --git a/tests/providers/microsoft/azure/hooks/test_wasb.py b/tests/providers/microsoft/azure/hooks/test_wasb.py index 46fe8e5cc319d..87b1c44b0c8e7 100644 --- a/tests/providers/microsoft/azure/hooks/test_wasb.py +++ b/tests/providers/microsoft/azure/hooks/test_wasb.py @@ -188,3 +188,13 @@ def test_delete_multiple_nonexisting_blobs_fails(self, mock_service): is_prefix=True, ignore_if_missing=False ) self.assertIsInstance(context.exception, AirflowException) + + @mock.patch('airflow.providers.microsoft.azure.hooks.wasb.BlockBlobService', + autospec=True) + def test_get_blobs_list(self, mock_service): + mock_instance = mock_service.return_value + hook = WasbHook(wasb_conn_id='wasb_test_sas_token') + hook.get_blobs_list('container', 'prefix', num_results=1, timeout=3) + mock_instance.list_blobs.assert_called_once_with( + 'container', 'prefix', num_results=1, timeout=3 + ) diff --git a/airflow/providers/email/__init__.py b/tests/providers/microsoft/mssql/hooks/__init__.py similarity index 100% rename from airflow/providers/email/__init__.py rename to tests/providers/microsoft/mssql/hooks/__init__.py diff --git a/tests/providers/microsoft/mssql/hooks/test_mssql.py b/tests/providers/microsoft/mssql/hooks/test_mssql.py new file mode 100644 index 0000000000000..39f4baf41b59e --- /dev/null +++ b/tests/providers/microsoft/mssql/hooks/test_mssql.py @@ -0,0 +1,73 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import unittest + +import mock + +from airflow import PY38 +from airflow.models import Connection + +if not PY38: + from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook + +PYMSSQL_CONN = Connection(host='ip', schema='share', login='username', password='password', port=8081) + + +class TestMsSqlHook(unittest.TestCase): + @unittest.skipIf(PY38, "Mssql package not available when Python >= 3.8.") + @mock.patch('airflow.providers.microsoft.mssql.hooks.mssql.MsSqlHook.get_conn') + @mock.patch('airflow.hooks.dbapi_hook.DbApiHook.get_connection') + def test_get_conn_should_return_connection(self, get_connection, mssql_get_conn): + get_connection.return_value = PYMSSQL_CONN + mssql_get_conn.return_value = mock.Mock() + + hook = MsSqlHook() + conn = hook.get_conn() + + self.assertEqual(mssql_get_conn.return_value, conn) + mssql_get_conn.assert_called_once() + + @unittest.skipIf(PY38, "Mssql package not available when Python >= 3.8.") + @mock.patch('airflow.providers.microsoft.mssql.hooks.mssql.MsSqlHook.get_conn') + @mock.patch('airflow.hooks.dbapi_hook.DbApiHook.get_connection') + def test_set_autocommit_should_invoke_autocommit(self, get_connection, mssql_get_conn): + get_connection.return_value = PYMSSQL_CONN + mssql_get_conn.return_value = mock.Mock() + autocommit_value = mock.Mock() + + hook = MsSqlHook() + conn = hook.get_conn() + + hook.set_autocommit(conn, autocommit_value) + 
mssql_get_conn.assert_called_once() + mssql_get_conn.return_value.autocommit.assert_called_once_with(autocommit_value) + + @unittest.skipIf(PY38, "Mssql package not available when Python >= 3.8.") + @mock.patch('airflow.providers.microsoft.mssql.hooks.mssql.MsSqlHook.get_conn') + @mock.patch('airflow.hooks.dbapi_hook.DbApiHook.get_connection') + def test_get_autocommit_should_return_autocommit_state(self, get_connection, mssql_get_conn): + get_connection.return_value = PYMSSQL_CONN + mssql_get_conn.return_value = mock.Mock() + mssql_get_conn.return_value.autocommit_state = 'autocommit_state' + + hook = MsSqlHook() + conn = hook.get_conn() + + mssql_get_conn.assert_called_once() + self.assertEqual(hook.get_autocommit(conn), 'autocommit_state') diff --git a/tests/providers/pagerduty/hooks/test_pagerduty.py b/tests/providers/pagerduty/hooks/test_pagerduty.py index 254d5c0a31cda..f614e33f73a4c 100644 --- a/tests/providers/pagerduty/hooks/test_pagerduty.py +++ b/tests/providers/pagerduty/hooks/test_pagerduty.py @@ -28,8 +28,9 @@ class TestPagerdutyHook(unittest.TestCase): + @classmethod @provide_session - def setUp(self, session=None): + def setUpClass(cls, session=None): session.add(Connection( conn_id=DEFAULT_CONN_ID, conn_type='http', diff --git a/airflow/providers/email/operators/__init__.py b/tests/providers/samba/__init__.py similarity index 100% rename from airflow/providers/email/operators/__init__.py rename to tests/providers/samba/__init__.py diff --git a/tests/providers/samba/hooks/__init__.py b/tests/providers/samba/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/tests/providers/samba/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/samba/hooks/test_samba.py b/tests/providers/samba/hooks/test_samba.py new file mode 100644 index 0000000000000..cf63ef662305d --- /dev/null +++ b/tests/providers/samba/hooks/test_samba.py @@ -0,0 +1,116 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import unittest +from unittest.mock import call + +import mock +import smbclient + +from airflow.exceptions import AirflowException +from airflow.models import Connection +from airflow.providers.samba.hooks.samba import SambaHook + +connection = Connection(host='ip', schema='share', login='username', password='password') + + +class TestSambaHook(unittest.TestCase): + def test_get_conn_should_fail_if_conn_id_does_not_exist(self): + with self.assertRaises(AirflowException): + SambaHook('conn') + + @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection') + def test_get_conn(self, get_conn_mock): + get_conn_mock.return_value = connection + hook = SambaHook('samba_default') + + self.assertEqual(smbclient.SambaClient, type(hook.get_conn())) + get_conn_mock.assert_called_once_with('samba_default') + + @mock.patch('airflow.providers.samba.hooks.samba.SambaHook.get_conn') + @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection') + def test_push_from_local_should_succeed_if_destination_has_same_name_but_not_a_file(self, base_conn_mock, + samba_hook_mock): + base_conn_mock.return_value = connection + samba_hook_mock.get_conn.return_value = mock.Mock() + + samba_hook_mock.return_value.exists.return_value = True + samba_hook_mock.return_value.isfile.return_value = False + samba_hook_mock.return_value.exists.return_value = True + + hook = SambaHook('samba_default') + destination_filepath = "/path/to/dest/file" + local_filepath = "/path/to/local/file" + hook.push_from_local(destination_filepath=destination_filepath, local_filepath=local_filepath) + + base_conn_mock.assert_called_once_with('samba_default') + samba_hook_mock.assert_called_once() + samba_hook_mock.return_value.exists.assert_called_once_with(destination_filepath) + samba_hook_mock.return_value.isfile.assert_called_once_with(destination_filepath) + samba_hook_mock.return_value.remove.assert_not_called() + samba_hook_mock.return_value.upload.assert_called_once_with(local_filepath, destination_filepath) 
+ + @mock.patch('airflow.providers.samba.hooks.samba.SambaHook.get_conn') + @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection') + def test_push_from_local_should_delete_file_if_exists_and_save_file(self, base_conn_mock, + samba_hook_mock): + base_conn_mock.return_value = connection + samba_hook_mock.get_conn.return_value = mock.Mock() + + samba_hook_mock.return_value.exists.return_value = False + samba_hook_mock.return_value.exists.return_value = False + + hook = SambaHook('samba_default') + destination_folder = "/path/to/dest" + destination_filepath = destination_folder + "/file" + local_filepath = "/path/to/local/file" + hook.push_from_local(destination_filepath=destination_filepath, local_filepath=local_filepath) + + base_conn_mock.assert_called_once_with('samba_default') + samba_hook_mock.assert_called_once() + samba_hook_mock.return_value.exists.assert_has_calls([call(destination_filepath), + call(destination_folder)]) + samba_hook_mock.return_value.isfile.assert_not_called() + samba_hook_mock.return_value.remove.assert_not_called() + samba_hook_mock.return_value.mkdir.assert_called_once_with(destination_folder) + samba_hook_mock.return_value.upload.assert_called_once_with(local_filepath, destination_filepath) + + @mock.patch('airflow.providers.samba.hooks.samba.SambaHook.get_conn') + @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection') + def test_push_from_local_should_create_directory_if_not_exist_and_save_file(self, base_conn_mock, + samba_hook_mock): + base_conn_mock.return_value = connection + samba_hook_mock.get_conn.return_value = mock.Mock() + + samba_hook_mock.return_value.exists.return_value = False + samba_hook_mock.return_value.exists.return_value = False + + hook = SambaHook('samba_default') + destination_folder = "/path/to/dest" + destination_filepath = destination_folder + "/file" + local_filepath = "/path/to/local/file" + hook.push_from_local(destination_filepath=destination_filepath, local_filepath=local_filepath) + + 
base_conn_mock.assert_called_once_with('samba_default') + samba_hook_mock.assert_called_once() + samba_hook_mock.return_value.exists.assert_has_calls([call(destination_filepath), + call(destination_folder)]) + samba_hook_mock.return_value.isfile.assert_not_called() + samba_hook_mock.return_value.remove.assert_not_called() + samba_hook_mock.return_value.mkdir.assert_called_once_with(destination_folder) + samba_hook_mock.return_value.upload.assert_called_once_with(local_filepath, destination_filepath) diff --git a/tests/providers/slack/hooks/test_slack.py b/tests/providers/slack/hooks/test_slack.py index c6a8ad2d4f5ca..93349483cebd3 100644 --- a/tests/providers/slack/hooks/test_slack.py +++ b/tests/providers/slack/hooks/test_slack.py @@ -41,8 +41,9 @@ def test_get_token_with_token_only(self): expected = test_token self.assertEqual(output, expected) + @mock.patch('airflow.providers.slack.hooks.slack.WebClient') @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection') - def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock): + def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock, mock_slack_client): """ tests `__get_token` method when only connection is provided """ # Given test_token = None @@ -59,6 +60,7 @@ def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock): output = hook.token expected = test_password self.assertEqual(output, expected) + mock_slack_client.assert_called_once_with(test_password) @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection') def test_get_token_with_no_password_slack_conn_id_only(self, get_connection_mock): diff --git a/tests/providers/sqlite/hooks/test_sqlite.py b/tests/providers/sqlite/hooks/test_sqlite.py index 8fdcaae062436..833bb8837bd5a 100644 --- a/tests/providers/sqlite/hooks/test_sqlite.py +++ b/tests/providers/sqlite/hooks/test_sqlite.py @@ -32,7 +32,7 @@ def setUp(self): self.connection = Connection(host='host') class 
UnitTestSqliteHook(SqliteHook): - conn_name_attr = 'sqlite_conn_id' + conn_name_attr = 'test_conn_id' self.db_hook = UnitTestSqliteHook() self.db_hook.get_connection = mock.Mock() @@ -43,6 +43,13 @@ def test_get_conn(self, mock_connect): self.db_hook.get_conn() mock_connect.assert_called_once_with('host') + @patch('airflow.providers.sqlite.hooks.sqlite.sqlite3.connect') + def test_get_conn_non_default_id(self, mock_connect): + self.db_hook.test_conn_id = 'non_default' # pylint: disable=attribute-defined-outside-init + self.db_hook.get_conn() + mock_connect.assert_called_once_with('host') + self.db_hook.get_connection.assert_called_once_with('non_default') + class TestSqliteHook(unittest.TestCase): diff --git a/tests/secrets/test_local_filesystem.py b/tests/secrets/test_local_filesystem.py index 6f58850f9e089..97f6d423b9a33 100644 --- a/tests/secrets/test_local_filesystem.py +++ b/tests/secrets/test_local_filesystem.py @@ -24,7 +24,7 @@ from parameterized import parameterized -from airflow.exceptions import AirflowException, AirflowFileParseException +from airflow.exceptions import AirflowException, AirflowFileParseException, ConnectionNotUnique from airflow.secrets import local_filesystem from airflow.secrets.local_filesystem import LocalFilesystemBackend @@ -124,16 +124,16 @@ class TestLoadConnection(unittest.TestCase): ( ("CONN_ID=mysql://host_1/", {"CONN_ID": ["mysql://host_1"]}), ( - "CONN_ID=mysql://host_1/\nCONN_ID=mysql://host_2/", - {"CONN_ID": ["mysql://host_1", "mysql://host_2"]}, + "CONN_ID1=mysql://host_1/\nCONN_ID2=mysql://host_2/", + {"CONN_ID1": ["mysql://host_1"], "CONN_ID2": ["mysql://host_2"]}, ), ( - "CONN_ID=mysql://host_1/\n # AAAA\nCONN_ID=mysql://host_2/", - {"CONN_ID": ["mysql://host_1", "mysql://host_2"]}, + "CONN_ID1=mysql://host_1/\n # AAAA\nCONN_ID2=mysql://host_2/", + {"CONN_ID1": ["mysql://host_1"], "CONN_ID2": ["mysql://host_2"]}, ), ( - "\n\n\n\nCONN_ID=mysql://host_1/\n\n\n\n\nCONN_ID=mysql://host_2/\n\n\n", - {"CONN_ID": 
["mysql://host_1", "mysql://host_2"]}, + "\n\n\n\nCONN_ID1=mysql://host_1/\n\n\n\n\nCONN_ID2=mysql://host_2/\n\n\n", + {"CONN_ID1": ["mysql://host_1"], "CONN_ID2": ["mysql://host_2"]}, ), ) ) @@ -162,16 +162,8 @@ def test_env_file_invalid_format(self, content, expected_message): ( ({"CONN_ID": "mysql://host_1"}, {"CONN_ID": ["mysql://host_1"]}), ({"CONN_ID": ["mysql://host_1"]}, {"CONN_ID": ["mysql://host_1"]}), - ( - {"CONN_ID": ["mysql://host_1", "mysql://host_2"]}, - {"CONN_ID": ["mysql://host_1", "mysql://host_2"]}, - ), ({"CONN_ID": {"uri": "mysql://host_1"}}, {"CONN_ID": ["mysql://host_1"]}), ({"CONN_ID": [{"uri": "mysql://host_1"}]}, {"CONN_ID": ["mysql://host_1"]}), - ( - {"CONN_ID": [{"uri": "mysql://host_1"}, {"uri": "mysql://host_2"}]}, - {"CONN_ID": ["mysql://host_1", "mysql://host_2"]}, - ), ) ) def test_json_file_should_load_connection(self, file_content, expected_connection_uris): @@ -211,16 +203,8 @@ def test_missing_file(self, mock_exists): ( ("""CONN_A: 'mysql://host_a'""", {"CONN_A": ["mysql://host_a"]}), (""" - CONN_B: - - 'mysql://host_a' - - 'mysql://host_b' - """, {"CONN_B": ["mysql://host_a", "mysql://host_b"]}), - (""" conn_a: mysql://hosta conn_b: - - mysql://hostb - - mysql://hostc - conn_c: conn_type: scheme host: host schema: lschema @@ -231,8 +215,8 @@ def test_missing_file(self, mock_exists): extra__google_cloud_platform__keyfile_dict: a: b extra__google_cloud_platform__keyfile_path: asaa""", - {"conn_a": ["mysql://hosta"], "conn_b": ["mysql://hostb", "mysql://hostc"], - "conn_c": [''.join("""scheme://Login:None@host:1234/lschema? + {"conn_a": ["mysql://hosta"], + "conn_b": [''.join("""scheme://Login:None@host:1234/lschema? 
extra__google_cloud_platform__keyfile_dict=%7B%27a%27%3A+%27b%27%7D &extra__google_cloud_platform__keyfile_path=asaa""".split())]}), ) @@ -316,6 +300,44 @@ def test_yaml_invalid_extra(self, file_content, expected_message): with self.assertRaisesRegex(AirflowException, re.escape(expected_message)): local_filesystem.load_connections("a.yaml") + @parameterized.expand( + ( + "CONN_ID=mysql://host_1/\nCONN_ID=mysql://host_2/", + ), + ) + def test_ensure_unique_connection_env(self, file_content): + with mock_local_file(file_content): + with self.assertRaises(ConnectionNotUnique): + local_filesystem.load_connections("a.env") + + @parameterized.expand( + ( + ( + {"CONN_ID": ["mysql://host_1", "mysql://host_2"]}, + ), + ( + {"CONN_ID": [{"uri": "mysql://host_1"}, {"uri": "mysql://host_2"}]}, + ), + ) + ) + def test_ensure_unique_connection_json(self, file_content): + with mock_local_file(json.dumps(file_content)): + with self.assertRaises(ConnectionNotUnique): + local_filesystem.load_connections("a.json") + + @parameterized.expand( + ( + (""" + conn_a: + - mysql://hosta + - mysql://hostb"""), + ), + ) + def test_ensure_unique_connection_yaml(self, file_content): + with mock_local_file(file_content): + with self.assertRaises(ConnectionNotUnique): + local_filesystem.load_connections("a.yaml") + class TestLocalFileBackend(unittest.TestCase): def test_should_read_variable(self): @@ -328,11 +350,11 @@ def test_should_read_variable(self): def test_should_read_connection(self): with NamedTemporaryFile(suffix=".env") as tmp_file: - tmp_file.write("CONN_A=mysql://host_a\nCONN_A=mysql://host_b".encode()) + tmp_file.write("CONN_A=mysql://host_a".encode()) tmp_file.flush() backend = LocalFilesystemBackend(connections_file_path=tmp_file.name) self.assertEqual( - ["mysql://host_a", "mysql://host_b"], + ["mysql://host_a"], [conn.get_uri() for conn in backend.get_connections("CONN_A")], ) self.assertIsNone(backend.get_variable("CONN_B")) diff --git a/tests/secrets/test_secrets_backends.py 
b/tests/secrets/test_secrets_backends.py index a6917cc14e141..08f8dd4573c8e 100644 --- a/tests/secrets/test_secrets_backends.py +++ b/tests/secrets/test_secrets_backends.py @@ -71,17 +71,15 @@ def test_connection_env_secrets_backend(self): self.assertEqual(sample_conn_1.host.lower(), conn.host) def test_connection_metastore_secrets_backend(self): - sample_conn_2a = SampleConn("sample_2", "A") - sample_conn_2b = SampleConn("sample_2", "B") + sample_conn_2 = SampleConn("sample_2", "A") with create_session() as session: - session.add(sample_conn_2a.conn) - session.add(sample_conn_2b.conn) + session.add(sample_conn_2.conn) session.commit() metastore_backend = MetastoreBackend() conn_list = metastore_backend.get_connections("sample_2") host_list = {x.host for x in conn_list} self.assertEqual( - {sample_conn_2a.host.lower(), sample_conn_2b.host.lower()}, set(host_list) + {sample_conn_2.host.lower()}, set(host_list) ) @mock.patch.dict('os.environ', { diff --git a/tests/sensors/test_base_sensor.py b/tests/sensors/test_base_sensor.py index a91412c0e77f1..cc0bbef359524 100644 --- a/tests/sensors/test_base_sensor.py +++ b/tests/sensors/test_base_sensor.py @@ -139,7 +139,10 @@ def test_soft_fail(self): tis = dr.get_task_instances() self.assertEqual(len(tis), 2) for ti in tis: - self.assertEqual(ti.state, State.SKIPPED) + if ti.task_id == SENSOR_OP: + self.assertEqual(ti.state, State.SKIPPED) + if ti.task_id == DUMMY_OP: + self.assertEqual(ti.state, State.NONE) def test_soft_fail_with_retries(self): sensor = self._make_sensor( @@ -166,7 +169,10 @@ def test_soft_fail_with_retries(self): tis = dr.get_task_instances() self.assertEqual(len(tis), 2) for ti in tis: - self.assertEqual(ti.state, State.SKIPPED) + if ti.task_id == SENSOR_OP: + self.assertEqual(ti.state, State.SKIPPED) + if ti.task_id == DUMMY_OP: + self.assertEqual(ti.state, State.NONE) def test_ok_with_reschedule(self): sensor = self._make_sensor( @@ -294,7 +300,10 @@ def test_soft_fail_with_reschedule(self): tis = 
dr.get_task_instances() self.assertEqual(len(tis), 2) for ti in tis: - self.assertEqual(ti.state, State.SKIPPED) + if ti.task_id == SENSOR_OP: + self.assertEqual(ti.state, State.SKIPPED) + if ti.task_id == DUMMY_OP: + self.assertEqual(ti.state, State.NONE) def test_ok_with_reschedule_and_retry(self): sensor = self._make_sensor( diff --git a/tests/sensors/test_timeout_sensor.py b/tests/sensors/test_timeout_sensor.py index 810a6e10bde4e..70228ddc48584 100644 --- a/tests/sensors/test_timeout_sensor.py +++ b/tests/sensors/test_timeout_sensor.py @@ -43,10 +43,9 @@ class TimeoutTestSensor(BaseSensorOperator): @apply_defaults def __init__(self, return_value=False, - *args, **kwargs): self.return_value = return_value - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def poke(self, context): return self.return_value diff --git a/tests/test_project_structure.py b/tests/test_project_structure.py index 1795b83cf5ce2..e30cca5dba8bc 100644 --- a/tests/test_project_structure.py +++ b/tests/test_project_structure.py @@ -29,14 +29,8 @@ MISSING_TEST_FILES = { 'tests/providers/google/cloud/log/test_gcs_task_handler.py', - 'tests/providers/google/cloud/operators/test_datastore.py', - 'tests/providers/google/cloud/utils/test_field_sanitizer.py', - 'tests/providers/google/cloud/utils/test_field_validator.py', - 'tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py', 'tests/providers/microsoft/azure/sensors/test_azure_cosmos.py', 'tests/providers/microsoft/azure/log/test_wasb_task_handler.py', - 'tests/providers/microsoft/mssql/hooks/test_mssql.py', - 'tests/providers/samba/hooks/test_samba.py' } diff --git a/tests/test_utils/mock_operators.py b/tests/test_utils/mock_operators.py index 6a25d9c8ccc56..36bca43aa7f22 100644 --- a/tests/test_utils/mock_operators.py +++ b/tests/test_utils/mock_operators.py @@ -121,8 +121,8 @@ def operator_extra_links(self): ) @apply_defaults - def __init__(self, bash_command=None, *args, **kwargs): - super(CustomOperator, 
self).__init__(*args, **kwargs) + def __init__(self, bash_command=None, **kwargs): + super(CustomOperator, self).__init__(**kwargs) self.bash_command = bash_command def execute(self, context): @@ -170,9 +170,9 @@ def __init__(self, *args, **kwargs): class DeprecatedOperator(BaseOperator): @apply_defaults - def __init__(self, *args, **kwargs): + def __init__(self, **kwargs): warnings.warn("This operator is deprecated.", DeprecationWarning, stacklevel=4) - super().__init__(*args, **kwargs) + super().__init__(**kwargs) def execute(self, context): pass diff --git a/tests/test_utils/system_tests_class.py b/tests/test_utils/system_tests_class.py index 217b2ef2bfa64..63e7c07a3f043 100644 --- a/tests/test_utils/system_tests_class.py +++ b/tests/test_utils/system_tests_class.py @@ -19,12 +19,12 @@ import shutil import sys from datetime import datetime +from pathlib import Path from unittest import TestCase from airflow.configuration import AIRFLOW_HOME, AirflowConfigParser, get_airflow_config from airflow.exceptions import AirflowException from airflow.models.dagbag import DagBag -from airflow.utils.file import mkdirs from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.state import State from tests.test_utils import AIRFLOW_MAIN_FOLDER @@ -94,7 +94,7 @@ def tearDown(self) -> None: print(f"Saving all log files to {logs_folder}/previous_runs/{date_str}") print() target_dir = os.path.join(logs_folder, "previous_runs", date_str) - mkdirs(target_dir, 0o755) + Path(target_dir).mkdir(parents=True, exist_ok=True, mode=0o755) files = os.listdir(logs_folder) for file in files: if file != "previous_runs": diff --git a/tests/utils/test_decorators.py b/tests/utils/test_decorators.py index d2d593908bac2..94920b9dee4cc 100644 --- a/tests/utils/test_decorators.py +++ b/tests/utils/test_decorators.py @@ -31,8 +31,8 @@ def __init__(self, test_param, params=None, default_args=None): # pylint: disab class DummySubClass(DummyClass): @apply_defaults - def __init__(self, 
test_sub_param, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, test_sub_param, **kwargs): + super().__init__(**kwargs) self.test_sub_param = test_sub_param