From 5963c47a768956425e7415e1587b9cc459f79a6f Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Tue, 1 Jun 2021 12:16:18 +0200
Subject: [PATCH] Rename the master branch of the Airflow repo to be `main`
 (#16149)

(cherry picked from commit 1e647029e469c1bb17e9ad051d0184f3357644c3)
---
 .asf.yaml | 4 +-
 .github/PULL_REQUEST_TEMPLATE.md | 4 +-
 .github/boring-cyborg.yml | 10 +-
 .github/workflows/build-images.yml | 10 +-
 .github/workflows/ci.yml | 30 ++---
 .github/workflows/codeql-analysis.yml | 2 +-
 .../label_when_reviewed_workflow_run.yml | 8 +-
 .github/workflows/repo_sync.yml | 36 ------
 BREEZE.rst | 28 ++---
 CI.rst | 56 ++++-----
 COMMITTERS.rst | 4 +-
 CONTRIBUTING.rst | 78 ++++++------
 CONTRIBUTORS_QUICK_START.rst | 60 ++++-----
 Dockerfile | 6 +-
 Dockerfile.ci | 6 +-
 IMAGES.rst | 32 ++---
 INSTALL | 12 +-
 LOCAL_VIRTUALENV.rst | 8 +-
 PULL_REQUEST_WORKFLOW.rst | 12 +-
 README.md | 20 +--
 TESTING.rst | 12 +-
 UPDATING.md | 2 +-
 airflow/config_templates/config.yml | 2 +-
 airflow/config_templates/default_airflow.cfg | 2 +-
 airflow/models/dagpickle.py | 2 +-
 airflow/providers/apache/beam/README.md | 10 +-
 airflow/providers/google/CHANGELOG.rst | 46 +++----
 .../cloud/example_dags/example_cloud_build.py | 2 +-
 airflow/providers/qubole/sensors/qubole.py | 4 +-
 airflow/providers/slack/operators/slack.py | 2 +-
 breeze | 12 +-
 codecov.yml | 2 -
 dev/README_RELEASE_AIRFLOW.md | 6 +-
 dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md | 2 +-
 dev/README_RELEASE_PROVIDER_PACKAGES.md | 4 +-
 dev/airflow-github | 10 +-
 dev/provider_packages/README.md | 2 +-
 .../prepare_provider_packages.py | 6 +-
 dev/retag_docker_images.py | 8 +-
 .../index.rst | 2 +-
 .../apache-airflow-providers-amazon/index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 docs/apache-airflow-providers-asana/index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../apache-airflow-providers-docker/index.rst | 2 +-
 .../example-dags.rst | 12 +-
 .../operators/cloud/index.rst | 2 +-
 .../operators/marketing_platform/index.rst | 2 +-
 docs/apache-airflow-providers-http/index.rst | 2 +-
 docs/apache-airflow-providers-jdbc/index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 docs/apache-airflow-providers-mysql/index.rst | 2 +-
 docs/apache-airflow-providers-neo4j/index.rst | 2 +-
 .../index.rst | 2 +-
 .../apache-airflow-providers-plexus/index.rst | 2 +-
 .../index.rst | 2 +-
 .../apache-airflow-providers-qubole/index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../apache-airflow-providers-sqlite/index.rst | 2 +-
 .../index.rst | 2 +-
 .../index.rst | 2 +-
 .../apache-airflow-providers-yandex/index.rst | 2 +-
 .../operators.rst | 2 +-
 .../howto/create-update-providers.rst | 8 +-
 docs/apache-airflow-providers/index.rst | 4 +-
 .../operators-and-hooks-ref/google.rst | 4 +-
 docs/apache-airflow/backport-providers.rst | 115 ------------------
 docs/apache-airflow/dag-run.rst | 2 +-
 .../deprecated-rest-api-ref.rst | 2 +-
 docs/apache-airflow/installation.rst | 18 +--
 docs/docker-stack/build-arg-ref.rst | 4 +-
 docs/docker-stack/build.rst | 12 +-
 .../github-different-repository.sh | 4 +-
 .../customizing/github-master.sh | 4 +-
 docs/docker-stack/entrypoint.rst | 4 +-
 docs/docker-stack/index.rst | 2 +-
 .../docs_build/dev_index_template.html.jinja2 | 2 +-
 docs/exts/docs_build/lint_checks.py | 2 +-
 docs/helm-chart/airflow-configuration.rst | 2 +-
 .../ci/constraints/ci_branch_constraints.sh | 4 +-
 .../ci/constraints/ci_commit_constraints.sh | 2 +-
 scripts/ci/images/ci_build_dockerhub.sh | 2 +-
 .../ci/images/ci_prepare_ci_image_on_ci.sh | 2 +-
 .../ci/images/ci_prepare_prod_image_on_ci.sh | 2 +-
 scripts/ci/libraries/_build_images.sh | 16 +--
 .../ci/libraries/_docker_engine_resources.sh | 2 +-
 scripts/ci/libraries/_initialization.sh | 12 +-
 scripts/ci/libraries/_sanity_checks.sh | 2 +-
 scripts/ci/openapi/client_codegen_diff.sh | 2 +-
 scripts/ci/selective_ci_checks.sh | 8 +-
 .../docker/install_airflow_from_branch_tip.sh | 2 +-
 scripts/in_container/_in_container_utils.sh | 2 +-
 .../update_quarantined_test_status.py | 2 +-
 tests/conftest.py | 2 +-
 .../cloud/operators/test_cloud_build.py | 4 +-
 .../test_cloud_build_system_helper.py | 2 +-
 .../perf/scheduler_dag_execution_timing.py | 2 +-
 109 files changed, 372 insertions(+), 531 deletions(-)
 delete mode 100644 .github/workflows/repo_sync.yml
 delete mode 100644 docs/apache-airflow/backport-providers.rst
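The diffstat above shows how broadly the rename touches the repository. For contributors tracking this change in a local clone or fork, a typical way to move a local checkout over to the renamed branch looks like the following (a minimal sketch using standard git commands; the remote name ``origin`` is an assumption):

.. code-block:: bash

    # Rename the local tracking branch and re-point its upstream.
    # Assumes the remote already exposes the renamed "main" branch.
    git branch -m master main
    git fetch origin
    git branch --set-upstream-to=origin/main main
    # Refresh the locally cached default-branch pointer for the remote.
    git remote set-head origin -a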
diff --git a/.asf.yaml b/.asf.yaml
index a9e6961c7e600..659fd1d65868b 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -41,10 +41,10 @@ github:
     rebase: false
   protected_branches:
-    master:
+    main:
       required_pull_request_reviews:
         required_approving_review_count: 1
-    main:
+    v2-1-stable:
       required_pull_request_reviews:
         required_approving_review_count: 1
     v1-10-stable:
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 1e3c23d5ad347..3846ae698c717 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -17,7 +17,7 @@ http://chris.beams.io/posts/git-commit/
 ---
 **^ Add meaningful description above**
-Read the **[Pull Request Guidelines](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#pull-request-guidelines)** for more information.
+Read the **[Pull Request Guidelines](https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#pull-request-guidelines)** for more information.
 In case of fundamental code change, Airflow Improvement Proposal ([AIP](https://cwiki.apache.org/confluence/display/AIRFLOW/Airflow+Improvements+Proposals)) is needed.
 In case of a new dependency, check compliance with the [ASF 3rd Party License Policy](https://www.apache.org/legal/resolved.html#category-x).
-In case of backwards incompatible changes please leave a note in [UPDATING.md](https://github.com/apache/airflow/blob/master/UPDATING.md).
+In case of backwards incompatible changes please leave a note in [UPDATING.md](https://github.com/apache/airflow/blob/main/UPDATING.md).
diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml
index 19b4c3fb148b0..7d97c2f6eafce 100644
--- a/.github/boring-cyborg.yml
+++ b/.github/boring-cyborg.yml
@@ -199,20 +199,20 @@ labelerFlags:
 firstPRWelcomeComment: >
   Congratulations on your first Pull Request and welcome to the Apache Airflow community!
   If you have any issues or are unsure about anything please check our
-  Contribution Guide (https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst)
+  Contribution Guide (https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst)
   Here are some useful points:
   - Pay attention to the quality of your code (flake8, pylint and type annotations). Our [pre-commits](
-    https://github.com/apache/airflow/blob/master/STATIC_CODE_CHECKS.rst#prerequisites-for-pre-commit-hooks)
+    https://github.com/apache/airflow/blob/main/STATIC_CODE_CHECKS.rst#prerequisites-for-pre-commit-hooks)
     will help you with that.
   - In case of a new feature add useful documentation (in docstrings or in `docs/` directory).
     Adding a new operator? Check this short
-    [guide](https://github.com/apache/airflow/blob/master/docs/apache-airflow/howto/custom-operator.rst)
+    [guide](https://github.com/apache/airflow/blob/main/docs/apache-airflow/howto/custom-operator.rst)
     Consider adding an example DAG that shows how users should use it.
-  - Consider using [Breeze environment](https://github.com/apache/airflow/blob/master/BREEZE.rst) for testing
+  - Consider using [Breeze environment](https://github.com/apache/airflow/blob/main/BREEZE.rst) for testing
     locally, it's a heavy docker but it ships with a working Airflow and a lot of integrations.
   - Be patient and persistent. It might take some time to get a review or get the final approval from
@@ -222,7 +222,7 @@ firstPRWelcomeComment: >
     communication including (but not limited to) comments on Pull Requests, Mailing list and Slack.
   - Be sure to read the [Airflow Coding style](
-    https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#coding-style-and-best-practices).
+    https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#coding-style-and-best-practices).
   Apache Airflow is a community-driven project and together we are making it better 🚀.
diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index cf71c8357fd42..34ddcc39d8ecb 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -22,7 +22,7 @@ on:  # yamllint disable-line rule:truthy
     - cron: '28 0 * * *'
   pull_request_target:
   push:
-    branches: ['main', 'master', 'v1-10-test', 'v1-10-stable', 'v2-0-test']
+    branches: ['main', 'v1-10-test', 'v1-10-stable', 'v2-0-test']
 env:
   MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
@@ -130,7 +130,7 @@ jobs:
     strategy:
       matrix:
         # We need to attempt to build all possible versions here because pull_request_target
-        # event is run from master for both master and v1-10-tests
+        # event is run for both main and v1-10-tests
        python-version: ${{ fromJson(needs.build-info.outputs.allPythonVersions) }}
      fail-fast: true
    if: needs.build-info.outputs.image-build == 'true'
        # We cannot "source" the script here because that would be a security problem (we cannot run
        # any code that comes from the sources coming from the PR. Therefore we extract the
        # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH via custom grep/awk/sed commands
-       # Also 2.7 and 3.5 versions are not allowed to proceed on master
+       # Also 2.7 and 3.5 versions are not allowed to proceed on main
        id: defaults
        run: |
          DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \
@@ -218,7 +218,7 @@ jobs:
     strategy:
       matrix:
         # We need to attempt to build all possible versions here because pull_request_target
-        # event is run from master for both master and v1-10-tests
+        # event is run for both main and v1-10-tests
        python-version: ${{ fromJson(needs.build-info.outputs.allPythonVersions) }}
      fail-fast: true
    if: needs.build-info.outputs.image-build == 'true'
@@ -245,7 +245,7 @@
        # We cannot "source" the script here because that would be a security problem (we cannot run
        # any code that comes from the sources coming from the PR. Therefore we extract the
        # DEFAULT_BRANCH and DEFAULT_CONSTRAINTS_BRANCH via custom grep/awk/sed commands
-       # Also 2.7 and 3.5 versions are not allowed to proceed on master
+       # Also 2.7 and 3.5 versions are not allowed to proceed on main
        id: defaults
        run: |
          DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 64312b87a894c..7c242941f4838 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -21,9 +21,9 @@ on:  # yamllint disable-line rule:truthy
   schedule:
     - cron: '28 0 * * *'
   push:
-    branches: ['master', 'v[0-9]+-[0-9]+-test']
+    branches: ['main', 'v[0-9]+-[0-9]+-test']
   pull_request:
-    branches: ['master', 'v[0-9]+-[0-9]+-test', 'v[0-9]+-[0-9]+-stable']
+    branches: ['main', 'v[0-9]+-[0-9]+-test', 'v[0-9]+-[0-9]+-stable']
 env:
   MOUNT_SELECTED_LOCAL_SOURCES: "false"
@@ -494,7 +494,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       - name: Configure AWS credentials
         uses: ./.github/actions/configure-aws-credentials
         if: >
-          github.ref == 'refs/heads/master' && github.repository == 'apache/airflow' &&
+          github.ref == 'refs/heads/main' && github.repository == 'apache/airflow' &&
           github.event_name == 'push'
         with:
           aws-access-key-id: ${{ secrets.DOCS_AWS_ACCESS_KEY_ID }}
@@ -502,7 +502,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           aws-region: eu-central-1
       - name: "Upload documentation to AWS S3"
         if: >
-          github.ref == 'refs/heads/master' && github.repository == 'apache/airflow' &&
+          github.ref == 'refs/heads/main' && github.repository == 'apache/airflow' &&
           github.event_name == 'push'
         run: aws s3 sync --delete ./files/documentation s3://apache-airflow-docs
@@ -517,13 +517,13 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       VERSION_SUFFIX_FOR_PYPI: ".dev0"
       GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
-    if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'master'
+    if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'main'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         with:
           persist-credentials: false
-        if: needs.build-info.outputs.default-branch == 'master'
+        if: needs.build-info.outputs.default-branch == 'main'
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -564,13 +564,13 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       VERSION_SUFFIX_FOR_PYPI: ".dev0"
       GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
-    if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'master'
+    if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'main'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         with:
           persist-credentials: false
-        if: needs.build-info.outputs.default-branch == 'master'
+        if: needs.build-info.outputs.default-branch == 'main'
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -839,8 +839,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         uses: actions/setup-python@v2
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
-      - name: "Set issue id for master"
-        if: github.ref == 'refs/heads/master'
+      - name: "Set issue id for main"
+        if: github.ref == 'refs/heads/main'
         run: |
           echo "ISSUE_ID=10118" >> $GITHUB_ENV
       - name: "Set issue id for v1-10-stable"
@@ -1045,7 +1045,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     - docs
   # TODO: Generalize me (find a better way to select matching branches)
   if: >
-    (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
+    (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
     github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test') &&
     github.event_name != 'schedule'
   strategy:
@@ -1070,7 +1070,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       - name: Set push-python-image
         id: push-python-image
         run: |
-          if [[ "${REF}" == 'refs/head/master' || "${REF}" == 'refs/head/main' ]]; then
+          if [[ "${REF}" == 'refs/heads/main' ]]; then
            echo "::set-output name=wanted::true"
          else
            echo "::set-output name=wanted::false"
@@ -1106,7 +1106,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     - docs
   # TODO: Generalize me (find a better way to select matching branches)
   if: >
-    (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
+    (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
     github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test') &&
     github.event_name != 'schedule'
   strategy:
@@ -1155,7 +1155,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
   # Only run it for direct pushes
   # TODO: Generalize me (find a better way to select matching branches)
   if: >
-    github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
+    github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
     github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test'
   steps:
     - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -1241,7 +1241,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           github_token: ${{ secrets.GITHUB_TOKEN }}
           tags: true
           force: true
-          branch: master
+          branch: main
   tests-ui:
     timeout-minutes: 10
- name: "Initiate GitHub Check forcing rerun of SH ${{ github.event.pull_request.head.sha }}" uses: ./.github/actions/checks-action @@ -139,7 +139,7 @@ jobs: The PR is likely OK to be merged with just subset of tests for default Python and Database versions without running the full matrix of tests, because it does not modify the core of Airflow. If the committers decide that the full tests matrix is needed, they will add the label - 'full tests needed'. Then you should rebase to the latest master or amend the last commit + 'full tests needed'. Then you should rebase to the latest main or amend the last commit of the PR, and push it with --force-with-lease. - name: "Label when approved by committers for PRs that do not require tests at all" uses: ./.github/actions/label-when-approved-action @@ -153,7 +153,7 @@ jobs: comment: > The PR is likely ready to be merged. No tests are needed as no important environment files, nor python files were modified by it. However, committers might decide that full test matrix is - needed and add the 'full tests needed' label. Then you should rebase it to the latest master + needed and add the 'full tests needed' label. Then you should rebase it to the latest main or amend the last commit of the PR, and push it with --force-with-lease. - name: Update Selective Build check uses: ./.github/actions/checks-action diff --git a/.github/workflows/repo_sync.yml b/.github/workflows/repo_sync.yml deleted file mode 100644 index 76afc19213948..0000000000000 --- a/.github/workflows/repo_sync.yml +++ /dev/null @@ -1,36 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# ---- -name: Force sync master from apache/airflow -on: # yamllint disable-line rule:truthy - workflow_dispatch: -jobs: - repo-sync: - if: github.repository != 'apache/airflow' - runs-on: ubuntu-20.04 - steps: - - uses: actions/checkout@v2 - with: - persist-credentials: false - - name: repo-sync - uses: repo-sync/github-sync@v2 - with: - source_repo: "apache/airflow" - source_branch: "master" - destination_branch: "master" - github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/BREEZE.rst b/BREEZE.rst index f57b2cbec4c60..df8a55711e299 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -541,7 +541,7 @@ For all development tasks, unit tests, integration tests, and static code checks **CI image** maintained on the DockerHub in the ``apache/airflow-ci`` repository. This Docker image contains a lot of test-related packages (size of ~1GB). Its tag follows the pattern of ``-python-ci`` -(for example, ``apache/airflow:master-python3.6-ci`` or ``apache/airflow-ci:v2-1-test-python3.6-ci``). +(for example, ``apache/airflow:main-python3.6-ci`` or ``apache/airflow-ci:v2-1-test-python3.6-ci``). The image is built using the ``_ Dockerfile. 
diff --git a/.github/workflows/repo_sync.yml b/.github/workflows/repo_sync.yml
deleted file mode 100644
index 76afc19213948..0000000000000
--- a/.github/workflows/repo_sync.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
----
-name: Force sync master from apache/airflow
-on:  # yamllint disable-line rule:truthy
-  workflow_dispatch:
-jobs:
-  repo-sync:
-    if: github.repository != 'apache/airflow'
-    runs-on: ubuntu-20.04
-    steps:
-      - uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-      - name: repo-sync
-        uses: repo-sync/github-sync@v2
-        with:
-          source_repo: "apache/airflow"
-          source_branch: "master"
-          destination_branch: "master"
-          github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/BREEZE.rst b/BREEZE.rst
index f57b2cbec4c60..df8a55711e299 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -541,7 +541,7 @@
 For all development tasks, unit tests, integration tests, and static code checks we use the
 **CI image** maintained on the DockerHub in the ``apache/airflow-ci`` repository.
 This Docker image contains a lot of test-related packages (size of ~1GB).
 Its tag follows the pattern of ``<BRANCH>-python<PYTHON_MAJOR_MINOR_VERSION>-ci``
-(for example, ``apache/airflow:master-python3.6-ci`` or ``apache/airflow-ci:v2-1-test-python3.6-ci``).
+(for example, ``apache/airflow:main-python3.6-ci`` or ``apache/airflow-ci:v2-1-test-python3.6-ci``).
 The image is built using the ``_ Dockerfile.
 The CI image is built automatically as needed, however it can be rebuilt manually with
@@ -637,7 +637,7 @@ Building Production images
 The **Production image** is also maintained on the DockerHub in both ``apache/airflow`` (for tagged and
 latest releases) or ``apache/airflow-ci`` repository (for branches). This Docker image (built using official
 Dockerfile) contains size-optimised Airflow installation with selected extras and dependencies. Its tag follows
-the pattern of ``<BRANCH>-python<PYTHON_MAJOR_MINOR_VERSION>`` (for example, ``apache/airflow-ci:master-python3.6``
+the pattern of ``<BRANCH>-python<PYTHON_MAJOR_MINOR_VERSION>`` (for example, ``apache/airflow-ci:main-python3.6``
 or ``apache/airflow-ci:v2-1-test-python3.6``) or in case of production images tagged with releases
 ``apache/airflow:2.1.2-python3.8`` or ``apache/airflow:latest`` or ``apache/airflow:latest-python3.8``.
@@ -730,7 +730,7 @@
 If you ever need to get a list of the files that will be checked (for troubleshooting):
 .. code-block:: bash
     breeze static-check identity --verbose  # currently staged files
-    breeze static-check identity --verbose -- --from-ref $(git merge-base master HEAD) --to-ref HEAD  # branch updates
+    breeze static-check identity --verbose -- --from-ref $(git merge-base main HEAD) --to-ref HEAD  # branch updates
 Building the Documentation
 --------------------------
@@ -773,8 +773,8 @@
 easily identify the location the problems with documentation originated from.
 Generating constraints
 ----------------------
-Whenever setup.py gets modified, the CI master job will re-generate constraint files. Those constraint
-files are stored in separated orphan branches: ``constraints-master``, ``constraints-2-0``.
+Whenever setup.py gets modified, the CI main job will re-generate constraint files. Those constraint
+files are stored in separate orphan branches: ``constraints-main``, ``constraints-2-0``.
 Those are constraint files as described in detail in the ``_ contributing documentation.
@@ -795,7 +795,7 @@
 Constraints are generated separately for each python version and there are separate constraints modes:
 * "constraints-source-providers" - those are constraints generated by using providers installed from
   current sources. While adding new providers their dependencies might change, so this set of providers
-  is the current set of the constraints for airflow and providers from the current master sources.
+  is the current set of the constraints for airflow and providers from the current main sources.
   Those providers are used by CI system to keep "stable" set of constraints. Use ``source-providers`` mode
   for that.
@@ -1272,7 +1272,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
         Installs Airflow directly from reference in GitHub when building PROD image.
-        This can be a GitHub branch like master or v2-1-test, or a tag like 2.1.0a1.
+        This can be a GitHub branch like main or v2-1-test, or a tag like 2.1.0a1.
 --installation-method INSTALLATION_METHOD
         Method of installing Airflow in PROD image - either from the sources ('.')
@@ -1420,7 +1420,7 @@ This is the current syntax for `./breeze <./breeze>`_:
         --use-github-registry flag) to build images. The pulled images will be used as cache.
         Those builds are usually faster than when ''--build-cache-local'' with the exception if
         the registry images are not yet updated. The DockerHub images are updated nightly and the
-        GitHub images are updated after merges to master so it might be that the images are still
+        GitHub images are updated after merges to main so it might be that the images are still
         outdated vs. the latest version of the Dockerfiles you are using. In this case, the
         ''--build-cache-local'' might be faster, especially if you iterate and change the
         Dockerfiles yourself.
@@ -1535,7 +1535,7 @@ This is the current syntax for `./breeze <./breeze>`_:
         Generates pinned constraint files with all extras from setup.py. Those files are generated in
         files folder - separate files for different python version. Those constraint files when
-        pushed to orphan constraints-master, constraints-2-0 branches are used
+        pushed to orphan constraints-main, constraints-2-0 branches are used
         to generate repeatable CI builds as well as run repeatable production image builds and
         upgrades when you want to include installing or updating some of the released providers
         released at the time particular airflow version was released. You can use those
@@ -2074,7 +2074,7 @@ This is the current syntax for `./breeze <./breeze>`_:
         --use-github-registry flag) to build images. The pulled images will be used as cache.
         Those builds are usually faster than when ''--build-cache-local'' with the exception if
        the registry images are not yet updated. The DockerHub images are updated nightly and the
-        GitHub images are updated after merges to master so it might be that the images are still
+        GitHub images are updated after merges to main so it might be that the images are still
        outdated vs. the latest version of the Dockerfiles you are using. In this case, the
        ''--build-cache-local'' might be faster, especially if you iterate and change the
        Dockerfiles yourself.
@@ -2255,9 +2255,9 @@ This is the current syntax for `./breeze <./breeze>`_:
        'breeze static-check mypy -- --files tests/core.py'
        'breeze static-check mypy -- --all-files'
-        To check all files that differ between you current branch and master run:
+        To check all files that differ between your current branch and main run:
-        'breeze static-check all -- --from-ref $(git merge-base master HEAD) --to-ref HEAD'
+        'breeze static-check all -- --from-ref $(git merge-base main HEAD) --to-ref HEAD'
        To check all files that are in the HEAD commit run:
@@ -2485,7 +2485,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
        Installs Airflow directly from reference in GitHub when building PROD image.
-        This can be a GitHub branch like master or v2-1-test, or a tag like 2.1.0a1.
+        This can be a GitHub branch like main or v2-1-test, or a tag like 2.1.0a1.
 --installation-method INSTALLATION_METHOD
        Method of installing Airflow in PROD image - either from the sources ('.')
@@ -2656,7 +2656,7 @@ This is the current syntax for `./breeze <./breeze>`_:
        --use-github-registry flag) to build images. The pulled images will be used as cache.
        Those builds are usually faster than when ''--build-cache-local'' with the exception if
        the registry images are not yet updated. The DockerHub images are updated nightly and the
-        GitHub images are updated after merges to master so it might be that the images are still
+        GitHub images are updated after merges to main so it might be that the images are still
        outdated vs. the latest version of the Dockerfiles you are using. In this case, the
        ''--build-cache-local'' might be faster, especially if you iterate and change the
        Dockerfiles yourself.
diff --git a/CI.rst b/CI.rst
index 6551f188e6e70..769d8d851f811 100644
--- a/CI.rst
+++ b/CI.rst
@@ -21,7 +21,7 @@ CI Environment
 ==============
 Continuous Integration is an important component of making Apache Airflow robust and stable. We are running
-a lot of tests for every pull request, for master and v2-*-test branches and regularly as CRON jobs.
+a lot of tests for every pull request, for main and v2-*-test branches and regularly as CRON jobs.
 Our execution environment for CI is `GitHub Actions `_. GitHub Actions
 (GA) are very well integrated with GitHub code and Workflow and it has evolved fast in 2019/2020 to become
@@ -57,20 +57,20 @@ Container Registry used as cache
 For our CI builds we are using the Container Registry to store results of the "Build Image" workflow
 and pass it to the "CI Build" workflow.
-Currently in master version of Airflow we run tests in 3 different versions of Python (3.6, 3.7, 3.8)
+Currently in main version of Airflow we run tests in 3 different versions of Python (3.6, 3.7, 3.8)
 which means that we have to build 6 images (3 CI ones and 3 PROD ones). Yet we run around 12 jobs
 with each of the CI images. That is a lot of time to just build the environment to run. Therefore
 we are utilising the ``pull_request_target`` feature of GitHub Actions.
 This feature allows to run a separate, independent workflow, when the main workflow is run -
-this separate workflow is different than the main one, because by default it runs using ``master`` version
+this separate workflow is different from the main one, because by default it runs using ``main`` version
 of the sources but also - and most of all - that it has WRITE access to the repository.
 This is especially important in our case where Pull Requests to Airflow might come from any repository,
 and it would be a huge security issue if anyone from outside could
 utilise the WRITE access to Apache Airflow repository via an external Pull Request.
-Thanks to the WRITE access and fact that the 'pull_request_target' by default uses the 'master' version of the
+Thanks to the WRITE access and fact that the 'pull_request_target' by default uses the 'main' version of the
 sources, we can safely run some logic there that will check out the incoming Pull Request, build the container
 image from the sources from the incoming PR and push such image to the GitHub Docker Registry - so that
 this image can be built only once and used by all the jobs running tests. The image is tagged with unique
@@ -304,13 +304,13 @@ You can use those variables when you try to reproduce the build locally.
 | | | | | tested set of dependency constraints      |
 | | | | | stored in separate "orphan" branches      |
 | | | | | of the airflow repository                 |
-| | | | | ("constraints-master", "constraints-2-0") |
+| | | | | ("constraints-main", "constraints-2-0")   |
 | | | | | but when this flag is set to anything but |
 | | | | | false (for example commit SHA), they are  |
 | | | | | not used and "eager" upgrade strategy is  |
 | | | | | used when installing dependencies. We set |
 | | | | | it to true in case of direct pushes       |
-| | | | | (merges) to master and scheduled builds so |
+| | | | | (merges) to main and scheduled builds so  |
 | | | | | that the constraints are tested. In those |
 | | | | | builds, in case we determine that the     |
 | | | | | tests pass we automatically push latest   |
 | | | | | set of                                     |
@@ -391,7 +391,7 @@ Note that you need to set "CI" variable to true in order to get the same results
 | CI_TARGET_REPO   | ``apache/airflow`` | Target repository for the CI build. Used to       |
 |                  |                    | compare incoming changes from PR with the target. |
 +------------------+--------------------+----------------------------------------------------+
-| CI_TARGET_BRANCH | ``master``         | Target branch where the PR should land. Used to   |
+| CI_TARGET_BRANCH | ``main``           | Target branch where the PR should land. Used to   |
 |                  |                    | compare incoming changes from PR with the target. |
 +------------------+--------------------+----------------------------------------------------+
 | CI_BUILD_ID      | ``0``              | Unique id of the build that is kept across re runs |
@@ -404,7 +404,7 @@ Note that you need to set "CI" variable to true in order to get the same results
 |                  |                    | [``pull_request``, ``pull_request_target``,        |
 |                  |                    | ``schedule``, ``push``]                            |
 +------------------+--------------------+----------------------------------------------------+
-| CI_REF           | ``refs/head/master`` | Branch in the source repository that is used to  |
+| CI_REF           | ``refs/head/main``   | Branch in the source repository that is used to  |
 |                  |                      | make the pull request.                            |
 +------------------+----------------------+---------------------------------------------------+
@@ -480,9 +480,9 @@
 We are currently in the process of testing using GitHub Container Registry as cache for
 the CI process. The default registry is set to "GitHub Packages", but we are testing the GitHub
 Container Registry. In case of GitHub Packages, authentication uses GITHUB_TOKEN mechanism.
 Authentication is needed for both pushing the images (WRITE) and pulling them (READ) - which means that GitHub token
-is used in "master" build (WRITE) and in fork builds (READ). For container registry, our images are
+is used in "main" build (WRITE) and in fork builds (READ). For container registry, our images are
 Publicly Visible and we do not need any authentication to pull them so the CONTAINER_REGISTRY_TOKEN is
-only set in the "master" builds only ("Build Images" workflow).
+only set in the "main" builds ("Build Images" workflow).
 Dockerhub Variables
 ===================
@@ -574,7 +574,7 @@
 The housekeeping is important - Python base images are refreshed with varying frequency (once a month
 usually but sometimes several times per week) with the latest security and bug fixes.
 Those patch level images releases can occasionally break Airflow builds (specifically Docker image builds
 based on those images) therefore in PRs we only use latest "good" Python image that we store in the
-private GitHub cache. The direct push/master builds are not using registry cache to pull the Python images
+private GitHub cache. The direct push/main builds are not using registry cache to pull the Python images
 - they are directly pulling the images from DockerHub, therefore they will try the latest images
 after they are released and in case they are fine, CI Docker image is built and tests are passing -
 those jobs will push the base images to the private GitHub Registry so that they be used by subsequent
 PR runs.
 Scheduled runs
 --------------
@@ -583,13 +583,13 @@
-Those runs are results of (nightly) triggered job - only for ``master`` branch. The
+Those runs are results of (nightly) triggered job - only for ``main`` branch. The
 main purpose of the job is to check if there was no impact of external dependency changes on the Apache
 Airflow code (for example transitive dependencies released that fail the build). It also checks if the
 Docker images can be built from scratch (again - to see if some dependencies have not changed - for
 example downloaded package releases etc.). Another reason for the nightly build is that the build tags the most
-recent master with ``nightly-master`` tag so that DockerHub build can pick up the moved tag and prepare a
-nightly public master build in the DockerHub registry. The ``v1-10-test`` branch images are built in
+recent main with ``nightly-main`` tag so that DockerHub build can pick up the moved tag and prepare a
+nightly public main build in the DockerHub registry. The ``v1-10-test`` branch images are built in
 DockerHub when pushing ``v1-10-stable`` manually.
 All runs consist of the same jobs, but the jobs behave slightly differently or they are skipped in different
@@ -603,13 +603,13 @@
 repository, they are not executed in forks - we want to be nice to the contributors and not use up their
 free build minutes on GitHub Actions.
 Sometimes (bugs in DockerHub or prolonged periods when the scheduled builds are failing)
-the automated build for nightly master is not executed for a long time. Such builds can be manually
+the automated build for nightly main is not executed for a long time. Such builds can be manually
 prepared and pushed by a maintainer who has the rights to push images to DockerHub (committers need to
 file JIRA ticket to Apache Infra in order to get an access).
 .. code-block:: bash
-    export BRANCH=master
+    export BRANCH=main
     export DOCKER_REPO=docker.io/apache/airflow
     for python_version in "3.6" "3.7" "3.8"
    (
@@ -747,12 +747,12 @@ Comments:
 (6) Nightly tag is pushed to the repository only in CRON job and only if all tests pass. This
     causes the DockerHub images to be built automatically and made available to developers.
-Force sync master from apache/airflow
+Force sync main from apache/airflow
 -------------------------------------
 This is a manually triggered workflow (via GitHub UI manual run) that should only be run in GitHub forks.
-When triggered, it will force-push the "apache/airflow" master to the fork's master. It's the easiest
-way to sync your fork master to the Apache Airflow's one.
+When triggered, it will force-push the "apache/airflow" main to the fork's main. It's the easiest
+way to sync your fork main to the Apache Airflow's one.
 Delete old artifacts
 --------------------
@@ -772,7 +772,7 @@
 It is run for JavaScript and Python code.
 Publishing documentation
 ------------------------
-Documentation from the ``master`` branch is automatically published on Amazon S3.
+Documentation from the ``main`` branch is automatically published on Amazon S3.
 To make this possible, GitHub Action has secrets set up with credentials for an
 Amazon Web Service account - ``DOCS_AWS_ACCESS_KEY_ID`` and ``DOCS_AWS_SECRET_ACCESS_KEY``.
@@ -787,7 +787,7 @@ Naming conventions for stored images
 The images produced during the CI builds are stored in the
 `GitHub Registry `_
-The images are stored with both "latest" tag (for last master push image that passes all the tests as well
+The images are stored with both "latest" tag (for last main push image that passes all the tests as well
 as with the tags indicating the origin of the image.
 The image names follow the patterns:
@@ -807,10 +807,10 @@
 | | | | It contains only compiled libraries and minimal set of dependencies to run Airflow. |
 +--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
-* <BRANCH> might be either "master" or "v1-10-test" or "v2-*-test"
-* <PYTHON_MAJOR_MINOR_VERSION> - Python version (Major + Minor). For "master" and "v2-*-test" should be in ["3.6", "3.7", "3.8"]. For
+* <BRANCH> might be either "main" or "v1-10-test" or "v2-*-test"
+* <PYTHON_MAJOR_MINOR_VERSION> - Python version (Major + Minor). For "main" and "v2-*-test" should be in ["3.6", "3.7", "3.8"]. For
   v1-10-test it should be in ["2.7", "3.5", "3.6", "3.7", "3.8"].
-* <COMMIT_SHA> - for images that get merged to "master", "v2-*-test" or "v1-10-test", or built as part of a
+* <COMMIT_SHA> - for images that get merged to "main", "v2-*-test" or "v1-10-test", or built as part of a
   pull request the images are tagged with the (full length) commit SHA of that particular branch. For pull
   requests the SHA used is the tip of the pull request branch.
@@ -823,9 +823,9 @@
 For example knowing that the CI build was for commit ``cd27124534b46c9688a1d89e75fcd137ab5137e3``:
 .. code-block:: bash
-    docker pull docker.pkg.github.com/apache/airflow/master-python3.6-ci:cd27124534b46c9688a1d89e75fcd137ab5137e3
+    docker pull docker.pkg.github.com/apache/airflow/main-python3.6-ci:cd27124534b46c9688a1d89e75fcd137ab5137e3
-    docker run -it docker.pkg.github.com/apache/airflow/master-python3.6-ci:cd27124534b46c9688a1d89e75fcd137ab5137e3
+    docker run -it docker.pkg.github.com/apache/airflow/main-python3.6-ci:cd27124534b46c9688a1d89e75fcd137ab5137e3
 But you usually need to pass more variables and complex setup if you want to connect to a database or
@@ -878,7 +878,7 @@
 In 2.0 line we currently support Python 3.6, 3.7, 3.8.
 In order to add a new version the following operations should be done (example uses Python 3.9)
-* copy the latest constraints in ``constraints-master`` branch from previous versions and name it
+* copy the latest constraints in ``constraints-main`` branch from previous versions and name it
   using the new Python version (``constraints-3.9.txt``). Commit and push
 * add the new Python version to `breeze-complete `_ and
@@ -911,7 +911,7 @@
 +-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+
 | Source type | Source         | Docker Tag            | Dockerfile location | Build Context | Autobuild | Build caching | Comment                                                                |
 +=============+================+=======================+=====================+===============+===========+===============+========================================================================+
-| Tag         | nightly-master | master-python3.9      | Dockerfile          | /             | x         | -             | Nightly CI/PROD images from successful scheduled master nightly builds |
+| Tag         | nightly-main   | main-python3.9        | Dockerfile          | /             | x         | -             | Nightly CI/PROD images from successful scheduled main nightly builds   |
 +-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+
 | Branch      | v2-*-stable    | v2-*-stable-python3.9 | Dockerfile          | /             | x         |               | CI/PROD images automatically built from pushed stable branch          |
 +-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+
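The "adding a new Python version" checklist above starts by seeding constraints for the new version in the orphan ``constraints-main`` branch. A hedged sketch of that first step (the branch layout is as quoted above; copying from the 3.8 file and the remote name are assumptions):

.. code-block:: bash

    # The constraints branches are orphan branches in the same repository.
    git checkout constraints-main
    cp constraints-3.8.txt constraints-3.9.txt
    git add constraints-3.9.txt
    git commit -m "Add Python 3.9 constraints, seeded from 3.8"
    git push apache constraints-main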
diff --git a/COMMITTERS.rst b/COMMITTERS.rst
index facb8913efdac..d5883e1192679 100644
--- a/COMMITTERS.rst
+++ b/COMMITTERS.rst
@@ -22,7 +22,7 @@ Committers and PMC's
 This document assumes that you know how Airflow's community works, but you would like to learn more about the rules
 by which we add new members.
-Before reading this document, you should be familiar with `Contributor's guide `__.
+Before reading this document, you should be familiar with `Contributor's guide `__.
 Guidelines to become an Airflow Committer
 ------------------------------------------
@@ -49,7 +49,7 @@
 General prerequisites that we look for in all candidates:
 2. Visibility on discussions on the dev mailing list, Slack channels or GitHub issues/discussions
 3. Contributions to community health and project's sustainability for the long-term
 4. Understands contributor/committer guidelines:
-   `Contributors' Guide `__
+   `Contributors' Guide `__
 Code contribution
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 1a480e2c0ade2..c51161c7fdcc6 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -29,7 +29,7 @@ rules of that community.
 New Contributor
 ---------------
-If you are a new contributor, please follow the `Contributors Quick Start `__ guide to get a gentle step-by-step introduction to setting up the development
 environment and making your first contribution.
@@ -198,9 +198,9 @@ Step 2: Configure Your Environment
 ----------------------------------
 You can use either a local virtual env or a Docker-based env. The differences
-between the two are explained `here `_.
+between the two are explained `here `_.
-The local env's instructions can be found in full in the `LOCAL_VIRTUALENV.rst `_ file.
+The local env's instructions can be found in full in the `LOCAL_VIRTUALENV.rst `_ file.
 The Docker env is here to maintain a consistent and common development environment so that you
 can replicate CI failures locally and work on solving them locally rather than by pushing to CI.
 You can configure the Docker-based Breeze development environment as follows:
@@ -261,24 +261,24 @@ Step 4: Prepare PR
   * Read about `email configuration in Airflow `__.
   * Find the class you should modify. For the example GitHub issue,
-    this is `email.py `__.
+    this is `email.py `__.
   * Find the test class where you should add tests. For the example ticket,
-    this is `test_email.py `__.
+    this is `test_email.py `__.
-  * Make sure your fork's master is synced with Apache Airflow's master before you create a branch. See
+  * Make sure your fork's main is synced with Apache Airflow's main before you create a branch. See
     `How to sync your fork <#how-to-sync-your-fork>`_ for details.
   * Create a local branch for your development. Make sure to use latest
-    ``apache/master`` as base for the branch. See `How to Rebase PR <#how-to-rebase-pr>`_ for some details
+    ``apache/main`` as base for the branch. See `How to Rebase PR <#how-to-rebase-pr>`_ for some details
     on setting up the ``apache`` remote. Note, some people develop their changes directly in their own
-    ``master`` branches - this is OK and you can make PR from your master to ``apache/master`` but we
+    ``main`` branches - this is OK and you can make PR from your main to ``apache/main`` but we
     recommend to always create a local branch for your development. This allows you to easily compare
     changes, have several changes that you work on at the same time and many more.
-    If you have ``apache`` set as remote then you can make sure that you have latest changes in your master
-    by ``git pull apache master`` when you are in the local ``master`` branch. If you have conflicts and
-    want to override your locally changed master you can override your local changes with
-    ``git fetch apache; git reset --hard apache/master``.
+    If you have ``apache`` set as remote then you can make sure that you have latest changes in your main
+    by ``git pull apache main`` when you are in the local ``main`` branch. If you have conflicts and
+    want to override your locally changed main you can override your local changes with
+    ``git fetch apache; git reset --hard apache/main``.
   * Modify the class and add necessary code and unit tests.
@@ -395,11 +395,11 @@ these guidelines:
 Airflow Git Branches
 ====================
-All new development in Airflow happens in the ``master`` branch. All PRs should target that branch.
+All new development in Airflow happens in the ``main`` branch. All PRs should target that branch.
 We also have a ``v2-*-test`` branches that are used to test ``2.*.x`` series of Airflow and where committers
-cherry-pick selected commits from the master branch.
+cherry-pick selected commits from the main branch.
 Cherry-picking is done with the ``-x`` flag.
@@ -422,7 +422,7 @@
 time when they converge.
 The production images are build in DockerHub from:
-* master branch for development
+* main branch for development
 * v2-*-test branches for testing 2.*.x release
 * ``2.*.*``, ``2.*.*rc*`` releases from the ``v2-*-stable`` branch when we prepare release candidates and
   final releases. There are no production images prepared from v2-*-stable branch.
@@ -683,7 +683,7 @@
 the providers are installed from PyPI, they provide the entry-point containing the metadata as described
 in the previous chapter. However when they are locally developed, together with Airflow, the mechanism
 of discovery of the providers is based on ``provider.yaml`` file that is placed in the top-folder of
 the provider. Similarly as in case of the ``provider.yaml`` file is compliant with the
-`json-schema specification `_.
+`json-schema specification `_.
 Thanks to that mechanism, you can develop community managed providers in a seamless way directly
 from Airflow sources, without preparing and releasing them as packages. This is achieved by:
@@ -804,7 +804,7 @@
 There are several sets of constraints we keep:
 * "constraints-source-providers" - those are constraints generated by using providers installed from
   current sources. While adding new providers their dependencies might change, so this set of providers
-  is the current set of the constraints for airflow and providers from the current master sources.
+  is the current set of the constraints for airflow and providers from the current main sources.
   Those providers are used by CI system to keep "stable" set of constraints. They are named
   ``constraints-source-providers-.txt``
@@ -820,7 +820,7 @@
 It can be done from the sources:
 .. code-block:: bash
     pip install -e . \
-      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt"
 or from the PyPI package:
@@ -828,7 +828,7 @@
 .. code-block:: bash
     pip install apache-airflow \
-      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt"
 This works also with extras - for example:
@@ -836,7 +836,7 @@
 .. code-block:: bash
     pip install .[ssh] \
-      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt"
 As of apache-airflow 1.10.12 it is also possible to use constraints directly from GitHub using specific
@@ -857,7 +857,7 @@
 If you want to update just airflow dependencies, without paying attention to providers, you can do it using
 .. code-block:: bash
     pip install . --upgrade \
-      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-no-providers-3.6.txt"
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.6.txt"
 The ``constraints-.txt`` and ``constraints-no-providers-.txt``
@@ -868,7 +868,7 @@ Manually generating constraint files
 ------------------------------------
 The constraint files are generated automatically by the CI job. Sometimes however it is needed to regenerate
-them manually (committers only). For example when master build did not succeed for quite some time).
+them manually (committers only). For example when main build did not succeed for quite some time).
 This can be done by running this (it utilizes parallel preparation of the constraints):
 .. code-block:: bash
@@ -894,7 +894,7 @@ generated files:
 .. code-block:: bash
-    cd
+    cd
     git pull
     cp ${AIRFLOW_SOURCES}/files/constraints-*/constraints*.txt .
     git diff
@@ -1221,14 +1221,14 @@ commands:
 How to sync your fork
 =====================
-When you have your fork, you should periodically synchronize the master of your fork with the
-Apache Airflow master. In order to do that you can ``git pull --rebase`` to your local git repository from
-apache remote and push the master (often with ``--force`` to your fork). There is also an easy
-way using ``Force sync master from apache/airflow`` workflow. You can go to "Actions" in your repository and
+When you have your fork, you should periodically synchronize the main of your fork with the
+Apache Airflow main. In order to do that you can ``git pull --rebase`` to your local git repository from
+apache remote and push the main (often with ``--force`` to your fork). There is also an easy
+way using ``Force sync main from apache/airflow`` workflow. You can go to "Actions" in your repository and
 choose the workflow and manually trigger the workflow using "Run workflow" command.
-This will force-push the master from apache/airflow to the master in your fork. Note that in case you
-modified the master in your fork, you might lose those changes.
+This will force-push the main from apache/airflow to the main in your fork. Note that in case you
+modified the main in your fork, you might lose those changes.
 How to rebase PR
@@ -1239,7 +1239,7 @@
 providing a better alternative to the merge workflow. We've therefore written a
 As opposed to the merge workflow, the rebase workflow allows us to
 clearly separate your changes from the changes of others. It puts the responsibility of rebasing on the
-author of the change. It also produces a "single-line" series of commits on the master branch. This
+author of the change. It also produces a "single-line" series of commits on the main branch. This
 makes it easier to understand what was going on and to find reasons for problems (it is especially
 useful for "bisecting" when looking for a commit that introduced some bugs).
@@ -1247,9 +1247,9 @@
 First of all, we suggest you read about the rebase workflow here:
 `Merging vs. rebasing `_. This is an
 excellent article that describes all the ins/outs of the rebase workflow. I recommend keeping it for future reference.
-The goal of rebasing your PR on top of ``apache/master`` is to "transplant" your change on top of
+The goal of rebasing your PR on top of ``apache/main`` is to "transplant" your change on top of
 the latest changes that are merged by others. It also allows you to fix all the conflicts
-that arise as a result of other people changing the same files as you and merging the changes to ``apache/master``.
+that arise as a result of other people changing the same files as you and merging the changes to ``apache/main``.
 Here is how rebase looks in practice (you can find a summary below these detailed steps):
@@ -1261,7 +1261,7 @@ as "apache" so you can refer to it easily:
 * If you use ssh: ``git remote add apache git@github.com:apache/airflow.git``
 * If you use https: ``git remote add apache https://github.com/apache/airflow.git``
-2. You then need to make sure that you have the latest master fetched from the ``apache`` repository. You can do this
+2. You then need to make sure that you have the latest main fetched from the ``apache`` repository. You can do this
    via:
    ``git fetch apache`` (to fetch apache remote)
@@ -1271,7 +1271,7 @@ as "apache" so you can refer to it easily:
 3. Assuming that your feature is in a branch in your repository called ``my-branch`` you can easily check
    what is the base commit you should rebase from by:
-   ``git merge-base my-branch apache/master``
+   ``git merge-base my-branch apache/main``
   This will print the HASH of the base commit which you should use to rebase your feature from.
   For example: ``5abce471e0690c6b8d06ca25685b0845c5fd270f``. Copy that HASH and go to the next step.
@@ -1296,11 +1296,11 @@ as "apache" so you can refer to it easily:
 5. Rebase:
-   ``git rebase HASH --onto apache/master``
+   ``git rebase HASH --onto apache/main``
   For example:
-   ``git rebase 5abce471e0690c6b8d06ca25685b0845c5fd270f --onto apache/master``
+   ``git rebase 5abce471e0690c6b8d06ca25685b0845c5fd270f --onto apache/main``
 6. If you have no conflicts - that's cool. You rebased. You can now run ``git push --force-with-lease`` to
   push your changes to your repository. That should trigger the build in our CI if you have a
@@ -1333,9 +1333,9 @@ Summary
 Useful when you understand the flow but don't remember the steps and want a quick reference.
 ``git fetch --all``
-``git merge-base my-branch apache/master``
+``git merge-base my-branch apache/main``
 ``git checkout my-branch``
-``git rebase HASH --onto apache/master``
+``git rebase HASH --onto apache/main``
 ``git push --force-with-lease``
 How to communicate
@@ -1372,7 +1372,7 @@ You can join the channels via links at the `Airflow Community page `_
-We don't create new issues on JIRA anymore. The reason we still look at JIRA issues is that there are valuable tickets inside of it. However, each new PR should be created on `GitHub issues `_ as stated in `Contribution Workflow Example `_
+We don't create new issues on JIRA anymore. The reason we still look at JIRA issues is that there are valuable tickets inside of it. However, each new PR should be created on `GitHub issues `_ as stated in `Contribution Workflow Example `_
 * The `Apache Airflow Slack `_ for:
   * ad-hoc questions related to development (#development channel)
- Branch name: master - Docker image: apache/airflow:master-python3.8-ci + Branch name: main + Docker image: apache/airflow:main-python3.8-ci Airflow source version: 2.0.0b2 Python version: 3.8 DockerHub user: apache @@ -408,7 +408,7 @@ For more information visit : |Breeze documentation| .. |Breeze documentation| raw:: html - Breeze documentation + Breeze documentation Following are some of important topics of Breeze documentation: @@ -417,7 +417,7 @@ Following are some of important topics of Breeze documentation: .. |Choosing different Breeze environment configuration| raw:: html - Choosing different Breeze environment configuration @@ -425,7 +425,7 @@ Following are some of important topics of Breeze documentation: .. |Troubleshooting Breeze environment| raw:: html - Troubleshooting + Troubleshooting Breeze environment @@ -433,7 +433,7 @@ Following are some of important topics of Breeze documentation: .. |Installing Additional tools to the Docker Image| raw:: html - Installing + Installing Additional tools to the Docker Image @@ -441,7 +441,7 @@ Following are some of important topics of Breeze documentation: .. |Internal details of Breeze| raw:: html - + Internal details of Breeze @@ -449,7 +449,7 @@ Following are some of important topics of Breeze documentation: .. |Breeze Command-Line Interface Reference| raw:: html - Breeze Command-Line Interface Reference @@ -457,7 +457,7 @@ Following are some of important topics of Breeze documentation: .. |Cleaning the environment| raw:: html - + Cleaning the environment @@ -465,7 +465,7 @@ Following are some of important topics of Breeze documentation: .. |Other uses of the Airflow Breeze environment| raw:: html - Other uses of the Airflow Breeze environment @@ -647,7 +647,7 @@ All Tests are inside ./tests directory. .. |TESTING.rst| raw:: html - TESTING.rst + TESTING.rst - Following are the some of important topics of TESTING.rst @@ -655,7 +655,7 @@ All Tests are inside ./tests directory. .. |Airflow Test Infrastructure| raw:: html - + Airflow Test Infrastructure @@ -663,7 +663,7 @@ All Tests are inside ./tests directory. .. |Airflow Unit Tests| raw:: html - Airflow Unit + Airflow Unit Tests @@ -671,7 +671,7 @@ All Tests are inside ./tests directory. .. |Helm Unit Tests| raw:: html - Helm Unit Tests + Helm Unit Tests @@ -679,7 +679,7 @@ All Tests are inside ./tests directory. .. |Airflow Integration Tests| raw:: html - + Airflow Integration Tests @@ -687,7 +687,7 @@ All Tests are inside ./tests directory. .. |Running Tests with Kubernetes| raw:: html - + Running Tests with Kubernetes @@ -695,7 +695,7 @@ All Tests are inside ./tests directory. .. |Airflow System Tests| raw:: html - Airflow + Airflow System Tests @@ -703,7 +703,7 @@ All Tests are inside ./tests directory. .. |Local and Remote Debugging in IDE| raw:: html - Local and Remote Debugging in IDE @@ -711,7 +711,7 @@ All Tests are inside ./tests directory. .. |BASH Unit Testing (BATS)| raw:: html - + BASH Unit Testing (BATS) @@ -845,7 +845,7 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r .. |STATIC_CODE_CHECKS.rst| raw:: html - + STATIC_CODE_CHECKS.rst - Following are some of the important links of STATIC_CODE_CHECKS.rst @@ -854,14 +854,14 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r .. |Pre-commit Hooks| raw:: html - + Pre-commit Hooks - |Pylint Static Code Checks| .. 
|Pylint Static Code Checks| raw:: html - Pylint Static Code Checks @@ -869,7 +869,7 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r .. |Running Static Code Checks via Breeze| raw:: html - Running Static Code Checks via Breeze @@ -883,7 +883,7 @@ Contribution guide .. |CONTRIBUTING.rst| raw:: html - CONTRIBUTING.rst + CONTRIBUTING.rst - Following are some of the important links of CONTRIBUTING.rst @@ -891,7 +891,7 @@ Contribution guide .. |Types of contributions| raw:: html - + Types of contributions @@ -899,7 +899,7 @@ Contribution guide .. |Roles of contributor| raw:: html - Roles of + Roles of contributor @@ -907,7 +907,7 @@ Contribution guide .. |Workflow for a contribution| raw:: html - + Workflow for a contribution @@ -948,7 +948,7 @@ Syncing Fork and rebasing Pull request Often it takes several days or weeks to discuss and iterate on the PR until it is ready to merge. In the meantime new commits are merged, and you might run into conflicts, therefore you should periodically -synchronize master in your fork with the ``apache/airflow`` master and rebase your PR on top of it. Following +synchronize main in your fork with the ``apache/airflow`` main and rebase your PR on top of it. The following describes how to do it. @@ -956,7 +956,7 @@ describes how to do it. .. |Syncing fork| raw:: html - + Update new changes made to apache:airflow project to your fork @@ -964,5 +964,5 @@ describes how to do it. .. |Rebasing pull request| raw:: html - + Rebasing pull request diff --git a/Dockerfile b/Dockerfile index 25678af2e8652..0d872acd7003f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -133,7 +133,7 @@ RUN mkdir -pv /usr/share/man/man1 \ ARG INSTALL_MYSQL_CLIENT="true" ARG AIRFLOW_REPO=apache/airflow -ARG AIRFLOW_BRANCH=master +ARG AIRFLOW_BRANCH=main ARG AIRFLOW_EXTRAS ARG ADDITIONAL_AIRFLOW_EXTRAS="" # Allows to override constraints source @@ -141,7 +141,7 @@ ARG CONSTRAINTS_GITHUB_REPOSITORY="apache/airflow" ARG AIRFLOW_CONSTRAINTS="constraints" ARG AIRFLOW_CONSTRAINTS_REFERENCE="" ARG AIRFLOW_CONSTRAINTS_LOCATION="" -ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-master" +ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main" ARG AIRFLOW_PIP_VERSION # By default PIP has progress bar but you can disable it. ARG PIP_PROGRESS_BAR @@ -206,7 +206,7 @@ ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} \ AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM} \ AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES_TO} -# In case of Production build image segment we want to pre-install master version of airflow +# In case of Production build image segment we want to pre-install main version of airflow # dependencies from GitHub so that we do not have to always reinstall them from scratch.
# The Airflow package (and provider packages in case INSTALL_PROVIDERS_FROM_SOURCES is "false") # are uninstalled, only dependencies remain diff --git a/Dockerfile.ci b/Dockerfile.ci index 4b094c9c9a83e..14fe7532c7bbc 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -190,7 +190,7 @@ RUN curl -sSL https://github.com/bats-core/bats-core/archive/v${BATS_VERSION}.ta && tar -zxf /tmp/bats-file.tgz -C /opt/bats/lib/bats-file --strip 1 && rm -rf /tmp/* ARG AIRFLOW_REPO=apache/airflow -ARG AIRFLOW_BRANCH=master +ARG AIRFLOW_BRANCH=main # Airflow Extras installed ARG AIRFLOW_EXTRAS="all" ARG ADDITIONAL_AIRFLOW_EXTRAS="" @@ -199,7 +199,7 @@ ARG CONSTRAINTS_GITHUB_REPOSITORY="apache/airflow" ARG AIRFLOW_CONSTRAINTS="constraints" ARG AIRFLOW_CONSTRAINTS_REFERENCE="" ARG AIRFLOW_CONSTRAINTS_LOCATION="" -ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-master" +ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main" # By changing the CI build epoch we can force reinstalling Airflow and pip all dependencies # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH environment variable. ARG AIRFLOW_CI_BUILD_EPOCH="3" @@ -262,7 +262,7 @@ ARG UPGRADE_TO_NEWER_DEPENDENCIES="false" ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} \ UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES} -# In case of CI builds we want to pre-install master version of airflow dependencies so that +# In case of CI builds we want to pre-install main version of airflow dependencies so that # we do not have to always reinstall them from scratch. # They are automatically reinstalled from scratch every time a patch release of Python gets released # The Airflow package (and provider packages in case INSTALL_PROVIDERS_FROM_SOURCES is "false") diff --git a/IMAGES.rst b/IMAGES.rst index 9e9e469853de7..6b4ca3117db92 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -76,8 +76,8 @@ And for production images with ``latest`` tag: where: -* ``BRANCH_OR_TAG`` - branch or tag used when creating the image. Examples: ``master``, - ``v2-1-test``, ``2.1.0``. The ``master``, ``v2-*-test`` labels are +* ``BRANCH_OR_TAG`` - branch or tag used when creating the image. Examples: ``main``, + ``v2-1-test``, ``2.1.0``. The ``main``, ``v2-*-test`` labels are built from branches so they change over time. The ``2.*.*`` labels are built from git tags and they are "fixed" once built. * ``PYTHON_MAJOR_MINOR_VERSION`` - version of Python used to build the image. Examples: ``3.6``, ``3.7``, @@ -193,13 +193,13 @@ This will build the image using command similar to: You can also build production images from a specific Git version by providing ``--install-airflow-reference`` -parameter to Breeze (this time constraints are taken from the ``constraints-master`` branch which is the +parameter to Breeze (this time constraints are taken from the ``constraints-main`` branch which is the HEAD of development for constraints): .. code-block:: bash pip install "https://github.com/apache/airflow/archive/.tar.gz#egg=apache-airflow" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" You can also skip installing airflow and install it from locally provided files by using the ``--install-from-docker-context-files`` and ``--disable-pypi-when-building`` parameters of Breeze: .. code-block:: bash @@ -292,10 +292,10 @@ For example: ..
code-block:: bash - apache/airflow-ci:master-python3.6 - production "master" image from current master - apache/airflow-ci:master-python3.6-ci - CI "master" image from current master - apache/airflow-ci:v2-1-test-python3.6-ci - CI "master" image from current v2-1-test branch - apache/airflow:python3.6-master - base Python image for the master branch + apache/airflow-ci:main-python3.6 - production "main" image from current main + apache/airflow-ci:main-python3.6-ci - CI "main" image from current main + apache/airflow-ci:v2-1-test-python3.6-ci - CI "main" image from current v2-1-test branch + apache/airflow:python3.6-main - base Python image for the main branch You can see those CI DockerHub images at ``_ @@ -327,7 +327,7 @@ By default DockerHub registry is used when you push or pull such images. However for CI builds we keep the images in GitHub registry as well - this way we can easily push the images automatically after merge requests and use such images for Pull Requests as cache - which makes CI builds much faster (images are available in the cache -right after merged request in master finishes it's build), The difference is visible especially if +right after a merged request in main finishes its build). The difference is especially visible if significant changes are made in the Dockerfile.CI. The images are named differently (in Docker definition of image names - registry URL is part of the @@ -355,7 +355,7 @@ Images with a commit SHA (built for pull requests and pushes) docker.pkg.github.com/apache-airflow/-pythonX.Y-build-v2: - for production build stage docker.pkg.github.com/apache-airflow/python-v2:X.Y-slim-buster- - for base Python images -Latest images (pushed when master merge succeeds): +Latest images (pushed when a merge to main succeeds): .. code-block:: bash @@ -377,7 +377,7 @@ Images with a commit SHA (built for pull requests and pushes) ghcr.io/apache/airflow--pythonX.Y-build-v2: - for production build stage ghcr.io/apache/airflow-python-v2:X.Y-slim-buster- - for base Python images -Latest images (pushed when master merge succeeds): +Latest images (pushed when a merge to main succeeds): .. code-block:: bash @@ -565,7 +565,7 @@ The following build arguments (``--build-arg`` in docker build command) can be u | ``AIRFLOW_REPO`` | ``apache/airflow`` | the repository from which PIP | | | | dependencies are pre-installed | +------------------------------------------+------------------------------------------+------------------------------------------+ -| ``AIRFLOW_BRANCH`` | ``master`` | the branch from which PIP dependencies | +| ``AIRFLOW_BRANCH`` | ``main`` | the branch from which PIP dependencies | | | | are pre-installed | +------------------------------------------+------------------------------------------+------------------------------------------+ | ``AIRFLOW_CI_BUILD_EPOCH`` | ``1`` | increasing this value will reinstall PIP | @@ -590,7 +590,7 @@ The following build arguments (``--build-arg`` in docker build command) can be u | ``AIRFLOW_CONSTRAINTS_REFERENCE`` | | reference (branch or tag) from GitHub | | | | repository from which constraints are | | | | used. By default it is set to | -| | | ``constraints-master`` but can be | +| | | ``constraints-main`` but can be | | | | ``constraints-2-0`` for 2.0.* versions | | | | or it could point to specific version | | | | for example ``constraints-2.0.0`` | @@ -714,12 +714,12 @@ way of querying image details via API.
You really need to download the image to We work around this: every time we build the image, we also build a very small manifest image containing a randomly generated UUID, and push it to the registry together with the main CI image. The tag for the manifest image reflects the image it refers to with an added ``-manifest`` suffix. -The manifest image for ``apache/airflow:master-python3.6-ci`` is named -``apache/airflow:master-python3.6-ci-manifest``. +The manifest image for ``apache/airflow:main-python3.6-ci`` is named +``apache/airflow:main-python3.6-ci-manifest``. The image is quickly pulled (it is really, really small) when important files change and the content of the randomly generated UUID is compared with the one in our image. If the contents are different -this means that the user should rebase to latest master and rebuild the image with pulling the image from +this means that the user should rebase to the latest main and rebuild the image, pulling the image from the repo first, as this will likely be faster than rebuilding the image locally. The random UUID is generated right after pre-cached pip install is run - and usually it means that diff --git a/INSTALL b/INSTALL index 50ac1500fd638..ced87c9460560 100644 --- a/INSTALL +++ b/INSTALL @@ -40,21 +40,21 @@ python setup.py install # There are different constraint files for different python versions. For example: pip install . \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" By default `pip install` in Airflow 2.0 installs only the provider packages that are needed by the extras and installs them as packages from PyPI rather than from local sources: pip install .[google,amazon] \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" You can upgrade just airflow, without paying attention to providers' dependencies by using 'no-providers' constraint files. This allows you to keep the installed provider packages. pip install . --upgrade \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-no-providers-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.6.txt" You can also install airflow in "editable mode" (with the -e flag) and then provider packages are @@ -69,7 +69,7 @@ and in ``CONTRIBUTING.rst`` for developing community maintained providers. This is useful if you want to develop providers: pip install -e . \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" You can also skip installing provider packages from PyPI by setting INSTALL_PROVIDERS_FROM_SOURCES to "true". In this case Airflow will be installed in non-editable mode with all providers installed from the sources. @@ -77,13 +77,13 @@ Additionally `provider.yaml` files will also be copied to providers folders whic discoverable by Airflow even if they are not installed from packages in this case. INSTALL_PROVIDERS_FROM_SOURCES="true" pip install .
\ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" Airflow can be installed with extras to install some additional features (for example 'async' or 'doc') or to automatically install providers and all dependencies needed by that provider: pip install .[async,google,amazon] \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" The list of available extras: diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst index 15f828d462791..f97f89a821cf9 100644 --- a/LOCAL_VIRTUALENV.rst +++ b/LOCAL_VIRTUALENV.rst @@ -151,7 +151,7 @@ for different python versions: .. code-block:: bash pip install -e ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" This will install Airflow in 'editable' mode - where sources of Airflow are taken directly from the source @@ -164,7 +164,7 @@ You can also install Airflow in non-editable mode: .. code-block:: bash pip install ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" This will copy the sources to the directory where python packages are usually installed. You can see the list of directories via the ``python -m site`` command. In this case the providers are installed from PyPI, not from @@ -173,7 +173,7 @@ sources, unless you set ``INSTALL_PROVIDERS_FROM_SOURCES`` environment variable .. code-block:: bash INSTALL_PROVIDERS_FROM_SOURCES="true" pip install ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" Note: when you first initialize the database (the next step), you may encounter some problems. @@ -231,7 +231,7 @@ before running ``pip install`` command: .. code-block:: bash INSTALL_PROVIDERS_FROM_SOURCES="true" pip install -U -e ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt" This way no provider packages will be installed and they will always be imported from the "airflow/providers" folder. diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst index 37444bafc77e2..96cc5b3f09514 100644 --- a/PULL_REQUEST_WORKFLOW.rst +++ b/PULL_REQUEST_WORKFLOW.rst @@ -50,7 +50,7 @@ We approached the problem by: the builds can complete in < 2 minutes) but also by limiting the number of tests executed in PRs that do not touch the "core" of Airflow, or only touching some - standalone - parts of Airflow such as "Providers", "WWW" or "CLI".
This solution is not yet perfect as there are likely some edge cases but - it is easy to maintain and we have an escape-hatch - all the tests are always executed in master pushes, + it is easy to maintain and we have an escape-hatch - all the tests are always executed in main pushes, so contributors can easily spot if there is a "missed" case and fix it - both by fixing the problem and adding those exceptions to the code. More about it can be found in the `Selective CI checks <#selective-ci-checks>`_ chapter. @@ -126,7 +126,7 @@ The logic implemented for the changes works as follows: 1) In case of direct push (so when PR gets merged) or scheduled run, we always run all tests and checks. This is in order to make sure that the merge did not miss anything important. The remainder of the logic is executed only in case of Pull Requests. We do not add providers tests in case DEFAULT_BRANCH is - different than master, because providers are only important in master branch and PRs to master branch. + different from main, because providers are only important in the main branch and in PRs to the main branch. 2) We retrieve which files have changed in the incoming Merge Commit (github.sha is a merge commit automatically prepared by GitHub in case of Pull Request, so we can retrieve the list of changed @@ -135,8 +135,8 @@ The logic implemented for the changes works as follows: 3) If any of the important, environment files changed (Dockerfile, ci scripts, setup.py, GitHub workflow files), then we again run all tests and checks. Those are cases where the logic of the checks changed or the environment for the checks changed so we want to make sure to check everything. We do not add - providers tests in case DEFAULT_BRANCH is different than master, because providers are only - important in master branch and PRs to master branch. + providers tests in case DEFAULT_BRANCH is different from main, because providers are only + important in the main branch and in PRs to the main branch. 4) If any of the py files changed: we need to have the CI image and run full static checks so we enable image building @@ -160,7 +160,7 @@ The logic implemented for the changes works as follows: b) if any of the Airflow API files changed we enable ``API`` test type c) if any of the Airflow CLI files changed we enable ``CLI`` test type and Kubernetes tests (the K8S tests depend on CLI changes as the helm chart uses the CLI to run Airflow). - d) if this is a master branch and if any of the Provider files changed we enable ``Providers`` test type + d) if this is the main branch and if any of the Provider files changed we enable ``Providers`` test type e) if any of the WWW files changed we enable ``WWW`` test type f) if any of the Kubernetes files changed we enable ``Kubernetes`` test type g) Then we subtract the count of all the ``specific`` above per-type changed files from the count of @@ -184,7 +184,7 @@ The logic implemented for the changes works as follows: Similarly to selective tests we also run selective security scans. In Pull requests, the Python scan will only run when there is a python code change and the JavaScript scan will only run if -there is a JavaScript or yarn.lock file change. For master builds, all scans are always executed. +there is a JavaScript or yarn.lock file change. For main builds, all scans are always executed.
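To make the scan gating concrete, here is a minimal sketch of how such file-based gating can be expressed in a shell step. This is illustrative only - ``BASE_SHA``/``HEAD_SHA``, the file patterns, and the variable names are assumptions for the sketch, not the actual CI implementation:

.. code-block:: bash

    # Sketch only: enable security scans based on the files changed in the PR.
    # BASE_SHA and HEAD_SHA are assumed inputs, not the real CI variables.
    changed_files=$(git diff --name-only "${BASE_SHA}" "${HEAD_SHA}")

    if [[ "${GITHUB_EVENT_NAME}" != "pull_request" ]]; then
        # Direct pushes and scheduled builds always run all scans.
        run_python_scan="true"
        run_javascript_scan="true"
    else
        run_python_scan="false"
        run_javascript_scan="false"
        # Enable each scan only when a matching file type changed.
        grep -qE '\.py$' <<<"${changed_files}" && run_python_scan="true"
        grep -qE '(\.js$|yarn\.lock$)' <<<"${changed_files}" && run_javascript_scan="true"
    fi

    echo "python scan: ${run_python_scan}, javascript scan: ${run_javascript_scan}"
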
The selective check algorithm is shown here: diff --git a/README.md b/README.md index 2209b04d4f0c8..df467241fe4e2 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ [![PyPI version](https://badge.fury.io/py/apache-airflow.svg)](https://badge.fury.io/py/apache-airflow) [![GitHub Build](https://github.com/apache/airflow/workflows/CI%20Build/badge.svg)](https://github.com/apache/airflow/actions) -[![Coverage Status](https://img.shields.io/codecov/c/github/apache/airflow/master.svg)](https://codecov.io/github/apache/airflow?branch=master) +[![Coverage Status](https://img.shields.io/codecov/c/github/apache/airflow/main.svg)](https://codecov.io/github/apache/airflow?branch=main) [![License](https://img.shields.io/:license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0.txt) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/apache-airflow.svg)](https://pypi.org/project/apache-airflow/) [![Docker Pulls](https://img.shields.io/docker/pulls/apache/airflow.svg)](https://hub.docker.com/r/apache/airflow) @@ -78,7 +78,7 @@ Airflow is not a streaming solution, but it is often used to process real-time d Apache Airflow is tested with: -| | Master version (dev) | Stable version (2.0.2) | Previous version (1.10.15) | +| | Main version (dev) | Stable version (2.0.2) | Previous version (1.10.15) | | ------------ | ------------------------- | ------------------------ | ------------------------- | | Python | 3.6, 3.7, 3.8 | 3.6, 3.7, 3.8 | 2.7, 3.5, 3.6, 3.7, 3.8 | | Kubernetes | 1.20, 1.19, 1.18 | 1.20, 1.19, 1.18 | 1.18, 1.17, 1.16 | @@ -101,9 +101,9 @@ They are based on the official release schedule of Python and Kubernetes, nicely [Kubernetes version skew policy](https://kubernetes.io/docs/setup/release/version-skew-policy/). 1. We drop support for Python and Kubernetes versions when they reach EOL. We drop support for those - EOL versions in master right after EOL date, and it is effectively removed when we release the + EOL versions in main right after the EOL date, and it is effectively removed when we release the first new MINOR (or MAJOR if there is no new MINOR version) of Airflow - For example for Python 3.6 it means that we drop support in master right after 23.12.2021, and the first + For example, for Python 3.6 this means that we drop support in main right after 23.12.2021, and the first MAJOR or MINOR version of Airflow released afterwards will not have it. 2. The "oldest" supported version of Python/Kubernetes is the default one. "Default" is only meaningful @@ -112,7 +112,7 @@ They are based on the official release schedule of Python and Kubernetes, nicely are both Python 3.6 images, however the first MINOR/MAJOR release of Airflow released after 23.12.2021 will become Python 3.7 images. -3. We support a new version of Python/Kubernetes in master after they are officially released, as soon as we +3. We support a new version of Python/Kubernetes in main after they are officially released, as soon as we make them work in our CI pipeline (which might not be immediate due to dependencies catching up with new versions of Python mostly) we release new images/support in Airflow based on the working CI setup. @@ -128,7 +128,7 @@ Visit the official Airflow website documentation (latest **stable** release) for [getting started](https://airflow.apache.org/docs/apache-airflow/stable/start/index.html), or walking through a more complete [tutorial](https://airflow.apache.org/docs/apache-airflow/stable/tutorial.html).
-> Note: If you're looking for documentation for master branch (latest development branch): you can find it on [s.apache.org/airflow-docs](https://s.apache.org/airflow-docs/). +> Note: If you're looking for documentation for the main branch (latest development branch), you can find it on [s.apache.org/airflow-docs](https://s.apache.org/airflow-docs/). For more information on Airflow Improvement Proposals (AIPs), visit the [Airflow Wiki](https://cwiki.apache.org/confluence/display/AIRFLOW/Airflow+Improvements+Proposals). @@ -145,7 +145,7 @@ if needed. This means that from time to time plain `pip install apache-airflow` produce an unusable Airflow installation. In order to have a repeatable installation, however, we also keep a set of "known-to-be-working" constraint -files in the orphan `constraints-master`, `constraints-2-0` branches. We keep those "known-to-be-working" +files in the orphan `constraints-main`, `constraints-2-0` branches. We keep those "known-to-be-working" constraints files separately per major/minor Python version. You can use them as constraint files when installing Airflow from PyPI. Note that you have to specify the correct Airflow tag/version/branch and Python versions in the URL. @@ -245,12 +245,12 @@ following the ASF Policy. ## Contributing -Want to help build Apache Airflow? Check out our [contributing documentation](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst). +Want to help build Apache Airflow? Check out our [contributing documentation](https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst). ## Who uses Apache Airflow? More than 400 organizations are using Apache Airflow -[in the wild](https://github.com/apache/airflow/blob/master/INTHEWILD.md). +[in the wild](https://github.com/apache/airflow/blob/main/INTHEWILD.md). ## Who Maintains Apache Airflow? Airflow is the work of the [community](https://github.com/apache/airflow/graphs/ but the [core committers/maintainers](https://people.apache.org/committers-by-project.html#airflow) are responsible for reviewing and merging PRs as well as steering conversation around new feature requests. If you would like to become a maintainer, please review the Apache Airflow -[committer requirements](https://github.com/apache/airflow/blob/master/COMMITTERS.rst#guidelines-to-become-an-airflow-committer). +[committer requirements](https://github.com/apache/airflow/blob/main/COMMITTERS.rst#guidelines-to-become-an-airflow-committer). ## Can I use the Apache Airflow logo in my presentation? diff --git a/TESTING.rst b/TESTING.rst index dd53a0629fd90..6cb76c69e0d9e 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -592,7 +592,7 @@ Deploying Airflow to the Kubernetes cluster created is also done via ``kind-clus The deploy command performs these steps: -1. It rebuilds the latest ``apache/airflow:master-pythonX.Y`` production images using the +1. It rebuilds the latest ``apache/airflow:main-pythonX.Y`` production images from the latest sources, using local caching. It also adds example DAGs to the image, so that they do not have to be mounted inside. 2. Loads the image to the Kind Cluster using the ``kind load`` command. @@ -712,8 +712,8 @@ The typical session for tests with Kubernetes looks like follows: Use CI image.
- Branch name: master - Docker image: apache/airflow:master-python3.7-ci + Branch name: main + Docker image: apache/airflow:main-python3.7-ci Airflow source version: 2.0.0.dev0 Python version: 3.7 @@ -1296,7 +1296,7 @@ By default ``/files/dags`` folder is mounted from your local `` the directory used by the airflow scheduler and webserver to scan for dags. You can place your dags there to test them. -The DAGs can be run in the master version of Airflow but they also work +The DAGs can be run in the main version of Airflow but they also work with older versions. To run the tests for the Airflow 1.10.* series, you need to run Breeze with diff --git a/UPDATING.md b/UPDATING.md index 910b4893dc0d1..798ca471b0159 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -2925,7 +2925,7 @@ If you are logging to Google cloud storage, please see the [Google cloud platfor If you are using S3, the instructions should be largely the same as the Google cloud platform instructions above. You will need a custom logging config. The `REMOTE_BASE_LOG_FOLDER` configuration key in your airflow config has been removed, therefore you will need to take the following steps: -- Copy the logging configuration from [`airflow/config_templates/airflow_logging_settings.py`](https://github.com/apache/airflow/blob/master/airflow/config_templates/airflow_local_settings.py). +- Copy the logging configuration from [`airflow/config_templates/airflow_logging_settings.py`](https://github.com/apache/airflow/blob/main/airflow/config_templates/airflow_local_settings.py). - Place it in a directory inside the Python import path `PYTHONPATH`. If you are using Python 2.7, ensure that any `__init__.py` files exist so that it is importable. - Update the config by setting the path of `REMOTE_BASE_LOG_FOLDER` explicitly in the config. The `REMOTE_BASE_LOG_FOLDER` key is not used anymore. - Set the `logging_config_class` to the filename and dict. For example, if you place `custom_logging_config.py` on the base of your `PYTHONPATH`, you will need to set `logging_config_class = custom_logging_config.LOGGING_CONFIG` in your config as Airflow 1.8. diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml index dd2d48f61e1b5..9f7c64024e076 100644 --- a/airflow/config_templates/config.yml +++ b/airflow/config_templates/config.yml @@ -737,7 +737,7 @@ deprecated since version 2.0. Please consider using `the Stable REST API `__. For more information on migration, see - `UPDATING.md `_ + `UPDATING.md `_ version_added: 2.0.0 type: boolean example: ~ diff --git a/airflow/config_templates/default_airflow.cfg b/airflow/config_templates/default_airflow.cfg index f8e8588f5669a..1cdf8b44027c1 100644 --- a/airflow/config_templates/default_airflow.cfg +++ b/airflow/config_templates/default_airflow.cfg @@ -399,7 +399,7 @@ fail_fast = False # deprecated since version 2.0. Please consider using # `the Stable REST API `__. # For more information on migration, see -# `UPDATING.md `_ +# `UPDATING.md `_ enable_experimental_api = False # How to authenticate users of the API. See diff --git a/airflow/models/dagpickle.py b/airflow/models/dagpickle.py index e16c046e005de..aa56ce3e5884b 100644 --- a/airflow/models/dagpickle.py +++ b/airflow/models/dagpickle.py @@ -26,7 +26,7 @@ class DagPickle(Base): """ - Dags can originate from different places (user repos, master repo, ...) + Dags can originate from different places (user repos, main repo, ...) and also get executed in different places (different executors).
This object represents a version of a DAG and becomes a source of truth for a BackfillJob execution. A pickle is a native python serialized object, diff --git a/airflow/providers/apache/beam/README.md b/airflow/providers/apache/beam/README.md index ea76b4777eaf4..ddc69a381436d 100644 --- a/airflow/providers/apache/beam/README.md +++ b/airflow/providers/apache/beam/README.md @@ -64,7 +64,7 @@ pip install apache-airflow-providers-apache-beam[google] In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.beam` provider are in the `airflow.providers.apache.beam` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) +in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#naming-conventions-for-provider-packages) ## Operators @@ -72,9 +72,9 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/ ### New operators | New Airflow 2.0 operators: `airflow.providers.apache.beam` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.beam.BeamRunJavaPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/operators/beam.py) | -| [operators.beam.BeamRunPythonPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/operators/beam.py) | +|:---------------------------------------------------------------------------------------------------------------------------------------------| +| [operators.beam.BeamRunJavaPipelineOperator](https://github.com/apache/airflow/blob/main/airflow/providers/apache/beam/operators/beam.py) | +| [operators.beam.BeamRunPythonPipelineOperator](https://github.com/apache/airflow/blob/main/airflow/providers/apache/beam/operators/beam.py) | ## Hooks @@ -83,7 +83,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/ | New Airflow 2.0 hooks: `airflow.providers.apache.beam` package | |:-----------------------------------------------------------------------------------------------------------------| -| [hooks.beam.BeamHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/hooks/beam.py) | +| [hooks.beam.BeamHook](https://github.com/apache/airflow/blob/main/airflow/providers/apache/beam/hooks/beam.py) | ## Releases diff --git a/airflow/providers/google/CHANGELOG.rst b/airflow/providers/google/CHANGELOG.rst index 4d2f5a19fbb22..9f7a45f8e29e4 100644 --- a/airflow/providers/google/CHANGELOG.rst +++ b/airflow/providers/google/CHANGELOG.rst @@ -156,29 +156,29 @@ Details are covered in the UPDATING.md files for each library, but there are som that you should pay attention to. 
-+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| Library name | Previous constraints | Current constraints | Upgrade Documentation | -+=====================================================================================================+======================+=====================+=======================================================================================================================================+ -| `google-cloud-automl `_ | ``>=0.4.0,<2.0.0`` | ``>=2.1.0,<3.0.0`` | `Upgrading google-cloud-automl `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-bigquery-datatransfer `_ | ``>=0.4.0,<2.0.0`` | ``>=3.0.0,<4.0.0`` | `Upgrading google-cloud-bigquery-datatransfer `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-datacatalog `_ | ``>=0.5.0,<0.8`` | ``>=3.0.0,<4.0.0`` | `Upgrading google-cloud-datacatalog `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-dataproc `_ | ``>=1.0.1,<2.0.0`` | ``>=2.2.0,<3.0.0`` | `Upgrading google-cloud-dataproc `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-kms `_ | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-kms `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-logging `_ | ``>=1.14.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-logging `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-monitoring `_ | ``>=0.34.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-monitoring `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-os-login `_ | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading 
google-cloud-os-login `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-pubsub `_ | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-pubsub `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ -| `google-cloud-tasks `_ | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-task `_ | -+-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| Library name | Previous constraints | Current constraints | Upgrade Documentation | ++=====================================================================================================+======================+=====================+=====================================================================================================================================+ +| `google-cloud-automl `_ | ``>=0.4.0,<2.0.0`` | ``>=2.1.0,<3.0.0`` | `Upgrading google-cloud-automl `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-bigquery-datatransfer `_ | ``>=0.4.0,<2.0.0`` | ``>=3.0.0,<4.0.0`` | `Upgrading google-cloud-bigquery-datatransfer `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-datacatalog `_ | ``>=0.5.0,<0.8`` | ``>=3.0.0,<4.0.0`` | `Upgrading google-cloud-datacatalog `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-dataproc `_ | ``>=1.0.1,<2.0.0`` | ``>=2.2.0,<3.0.0`` | `Upgrading google-cloud-dataproc `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-kms `_ | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-kms `_ | 
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-logging `_ | ``>=1.14.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-logging `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-monitoring `_ | ``>=0.34.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-monitoring `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-os-login `_ | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-os-login `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-pubsub `_ | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-pubsub `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-tasks `_ | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-task `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+-------------------------------------------------------------------------------------------------------------------------------------+ The field names use the snake_case convention ````````````````````````````````````````````` diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_build.py b/airflow/providers/google/cloud/example_dags/example_cloud_build.py index b62f90fdf3e62..cfaf364eb4323 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_build.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_build.py @@ -63,7 +63,7 @@ # [START howto_operator_create_build_from_repo_body] create_build_from_repo_body = { - "source": {"repoSource": {"repoName": GCP_SOURCE_REPOSITORY_NAME, "branchName": "master"}}, + "source": {"repoSource": {"repoName": GCP_SOURCE_REPOSITORY_NAME, "branchName": "main"}}, "steps": [ { "name": "gcr.io/cloud-builders/docker", diff --git a/airflow/providers/qubole/sensors/qubole.py b/airflow/providers/qubole/sensors/qubole.py index 044eb672d1f8a..c3230c359083b 100644 --- a/airflow/providers/qubole/sensors/qubole.py +++ b/airflow/providers/qubole/sensors/qubole.py @@ -72,7 +72,7 @@ class QuboleFileSensor(QuboleSensor): :param qubole_conn_id: Connection id which consists of qds auth_token :type qubole_conn_id: str :param data: a JSON object containing payload, whose presence needs to be checked - Check this `example `_ for sample payload structure. 
:type data: dict @@ -94,7 +94,7 @@ class QubolePartitionSensor(QuboleSensor): :param qubole_conn_id: Connection id which consists of qds auth_token :type qubole_conn_id: str :param data: a JSON object containing payload, whose presence needs to be checked. - Check this `example `_ for sample payload structure. :type data: dict diff --git a/airflow/providers/slack/operators/slack.py b/airflow/providers/slack/operators/slack.py index 1b88a801ff760..14af018d5c03c 100644 --- a/airflow/providers/slack/operators/slack.py +++ b/airflow/providers/slack/operators/slack.py @@ -129,7 +129,7 @@ def __init__( 'Here is a cat video instead\n' 'https://www.youtube.com/watch?v=J---aiyznGQ', icon_url: str = 'https://raw.githubusercontent.com/apache/' - 'airflow/master/airflow/www/static/pin_100.png', + 'airflow/main/airflow/www/static/pin_100.png', attachments: Optional[List] = None, blocks: Optional[List] = None, **kwargs, diff --git a/breeze b/breeze index 893ce17d32edf..0a05b625616fd 100755 --- a/breeze +++ b/breeze @@ -119,7 +119,7 @@ function breeze::setup_default_breeze_constants() { export FORCE_PULL_IMAGES="false" # By default we do not pull the python base image. We should do that only when we run upgrade check in - # CI master and when we manually refresh the images to latest versions + # CI main and when we manually refresh the images to the latest versions export FORCE_PULL_BASE_PYTHON_IMAGE="false" # Forward common host credentials to docker (gcloud, aws etc.). @@ -1831,7 +1831,7 @@ ${CMDNAME} generate-constraints [FLAGS] Generates pinned constraint files with all extras from setup.py. Those files are generated in the files folder - separate files for different python versions. Those constraint files when - pushed to orphan constraints-master, constraints-2-0 branches are used + pushed to orphan constraints-main, constraints-2-0 branches are used to generate repeatable CI builds as well as run repeatable production image builds and upgrades when you want to include installing or updating some of the released providers released at the time a particular airflow version was released. You can use those @@ -2035,9 +2035,9 @@ ${FORMATTED_STATIC_CHECKS} '${CMDNAME} static-check mypy -- --files tests/core.py' '${CMDNAME} static-check mypy -- --all-files' - To check all files that differ between you current branch and master run: + To check all files that differ between your current branch and main run: - '${CMDNAME} static-check all -- --from-ref \$(git merge-base master HEAD) --to-ref HEAD' + '${CMDNAME} static-check all -- --from-ref \$(git merge-base main HEAD) --to-ref HEAD' To check all files that are in the HEAD commit run: @@ -2416,7 +2416,7 @@ ${FORMATTED_INSTALL_AIRFLOW_VERSIONS} -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE Installs Airflow directly from a reference in GitHub when building the PROD image. - This can be a GitHub branch like master or v2-1-test, or a tag like 2.1.0a1. + This can be a GitHub branch like main or v2-1-test, or a tag like 2.1.0a1. --installation-method INSTALLATION_METHOD Method of installing Airflow in PROD image - either from the sources ('.') @@ -2727,7 +2727,7 @@ Build options: --use-github-registry flag) to build images. The pulled images will be used as cache. Those builds are usually faster than with ''--build-cache-local'', except when the registry images are not yet updated.
The DockerHub images are updated nightly and the - GitHub images are updated after merges to master so it might be that the images are still + GitHub images are updated after merges to main so it might be that the images are still outdated vs. the latest version of the Dockerfiles you are using. In this case, the ''--build-cache-local'' might be faster, especially if you iterate and change the Dockerfiles yourself. diff --git a/codecov.yml b/codecov.yml index 16ef1f68a90b7..f6697b2d30044 100644 --- a/codecov.yml +++ b/codecov.yml @@ -36,7 +36,6 @@ coverage: - "airflow" # advanced branches: - - master - main - v1-10-stable - v1-10-test @@ -54,7 +53,6 @@ coverage: base: auto # advanced branches: - - master - main - v1-10-stable - v1-10-test diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index b73c5f0a0648a..c9f0947f5a934 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -144,10 +144,10 @@ branches: `vX-Y-test` and `vX-Y-stable` (for example with `2.1.0rc1` release you Search and replace all the vX-Y for previous branches (TODO: we should likely automate this a bit more) -Run script to re-tag images from the ``master`` branch to the ``vX-Y-test`` branch: +Run the script to re-tag images from the ``main`` branch to the ``vX-Y-test`` branch: ```shell script - ./dev/retag_docker_images.py --source-branch master --target-branch ${BRANCH_PREFIX}-test + ./dev/retag_docker_images.py --source-branch main --target-branch ${BRANCH_PREFIX}-test ``` @@ -276,7 +276,7 @@ Only votes from PMC members are binding, but the release manager should encourag to test the release and vote with "(non-binding)". The test procedure for PMCs and Contributors who would like to test this RC is described in -https://github.com/apache/airflow/blob/master/dev/README.md#vote-and-verify-the-apache-airflow-release-candidate +https://github.com/apache/airflow/blob/main/dev/README.md#vote-and-verify-the-apache-airflow-release-candidate Please note that the version number excludes the `rcX` string, so it's now simply 2.0.2. This will allow us to rename the artifact without modifying diff --git a/dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md b/dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md index 850b384add272..b42ccf332b7f4 100644 --- a/dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md +++ b/dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md @@ -249,7 +249,7 @@ to test the release and vote with "(non-binding)". The test procedure for PMCs and Contributors who would like to test this RC is described in -, +, but again, this time it is a little bit different. To actually use this command you will need apache-airflow 1.10.15 diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md b/dev/README_RELEASE_PROVIDER_PACKAGES.md index 16340e3a3b95c..8779ef3b6bd86 100644 --- a/dev/README_RELEASE_PROVIDER_PACKAGES.md +++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md @@ -367,11 +367,11 @@ https://dist.apache.org/repos/dist/dev/airflow/providers/ Python "wheel" release.
The test procedure for PMC members who would like to test the RC candidates is described in -https://github.com/apache/airflow/blob/master/dev/README_RELEASE_PROVIDER_PACKAGES.md#verify-the-release-by-pmc-members +https://github.com/apache/airflow/blob/main/dev/README_RELEASE_PROVIDER_PACKAGES.md#verify-the-release-by-pmc-members and for Contributors: -https://github.com/apache/airflow/blob/master/dev/README_RELEASE_PROVIDER_PACKAGES.md#verify-by-contributors +https://github.com/apache/airflow/blob/main/dev/README_RELEASE_PROVIDER_PACKAGES.md#verify-by-contributors Public keys are available at: diff --git a/dev/airflow-github b/dev/airflow-github index b4304bf005d04..bf895002060bf 100755 --- a/dev/airflow-github +++ b/dev/airflow-github @@ -82,10 +82,10 @@ def get_issue_type(issue): return issue_type -def get_commit_in_master_associated_with_pr(repo: git.Repo, issue: Issue) -> Optional[str]: +def get_commit_in_main_associated_with_pr(repo: git.Repo, issue: Issue) -> Optional[str]: """For a PR, find the associated merged commit & return its SHA""" if issue.pull_request: - commit = repo.git.log(f"--grep=#{issue.number}", "origin/master", "--format=%H") + commit = repo.git.log(f"--grep=#{issue.number}", "origin/main", "--format=%H") if commit: return commit else: @@ -165,7 +165,7 @@ def compare(target_version, github_token, previous_version=None, show_uncherrypi ) for issue in milestone_issues: - commit_in_master = get_commit_in_master_associated_with_pr(repo, issue) + commit_in_main = get_commit_in_main_associated_with_pr(repo, issue) status = get_issue_status(issue) # Checks if commit was cherrypicked into branch. @@ -186,9 +186,7 @@ def compare(target_version, github_token, previous_version=None, show_uncherrypi ) print( - formatstr.format( - **fields, merged=cherrypicked, commit=commit_in_master if commit_in_master else "" - ) + formatstr.format(**fields, merged=cherrypicked, commit=commit_in_main if commit_in_main else "") ) print( diff --git a/dev/provider_packages/README.md b/dev/provider_packages/README.md index 61b9b4ec12dca..abc4309d8816e 100644 --- a/dev/provider_packages/README.md +++ b/dev/provider_packages/README.md @@ -216,7 +216,7 @@ that any new added providers are not added as packages (in case they are not yet ```shell script INSTALL_PROVIDERS_FROM_SOURCES="true" pip install -e ".[devel_all]" \ - --constraint https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt + --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.6.txt ``` Note that you might need to add some extra dependencies to your system to install "devel_all" - many diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py index 6b711f59dcba1..530a8e727c570 100755 --- a/dev/provider_packages/prepare_provider_packages.py +++ b/dev/provider_packages/prepare_provider_packages.py @@ -84,7 +84,7 @@ """ HTTPS_REMOTE = "apache-https-for-providers" -HEAD_OF_HTTPS_REMOTE = f"{HTTPS_REMOTE}/master" +HEAD_OF_HTTPS_REMOTE = f"{HTTPS_REMOTE}/main" PROVIDER_TEMPLATE_PREFIX = "PROVIDER_" @@ -560,7 +560,7 @@ def convert_new_classes_to_table( from tabulate import tabulate headers = [f"New Airflow 2.0 {entity_type.value.lower()}: `{full_package_name}` package"] - table = [(get_class_code_link(full_package_name, class_name, "master"),) for class_name in new_entities] + table = [(get_class_code_link(full_package_name, class_name, "main"),) for class_name in new_entities] return tabulate(table,
headers=headers, tablefmt="pipe") @@ -584,7 +584,7 @@ def convert_moved_classes_to_table( ] table = [ ( - get_class_code_link(full_package_name, to_class, "master"), + get_class_code_link(full_package_name, to_class, "main"), get_class_code_link("airflow", moved_entities[to_class], "v1-10-stable"), ) for to_class in sorted(moved_entities.keys()) diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py index 711838aec70ae..2e834752de141 100755 --- a/dev/retag_docker_images.py +++ b/dev/retag_docker_images.py @@ -24,7 +24,7 @@ # new branch. This can be useful in a few situations: # # * when starting a new release branch (for example `v2-1-test`) -# * when renaming a branch (for example `master->main`) +# * when renaming a branch # # Docker registries we are using: # @@ -42,10 +42,6 @@ # under apache/airflow-* but then we link it to the # project via docker image label. # -# The script helps to keep all the registries in-sync - copies -# `master` to `main` so that we can run it to test the rename and -# re-run it just before we switch the branches. - import subprocess from typing import List @@ -104,7 +100,7 @@ def pull_push_all_images( @click.option( "--target-dockerhub", type=str, default="apache/airflow-ci", help="Target repo [apache/airflow-ci]" ) -@click.option("--source-branch", type=str, default="master", help="Source branch name [master]") +@click.option("--source-branch", type=str, default="main", help="Source branch name [main]") @click.option("--target-branch", type=str, default="main", help="Target branch name [main]") @click.option("--dockerhub/--no-dockerhub", default=True, help="Whether to synchronize DockerHub") @click.option("--registry/--no-registry", default=True, help="Whether to synchronize GitHub registry") diff --git a/docs/apache-airflow-providers-airbyte/index.rst b/docs/apache-airflow-providers-airbyte/index.rst index cf6671d69084e..48ca0e0814a54 100644 --- a/docs/apache-airflow-providers-airbyte/index.rst +++ b/docs/apache-airflow-providers-airbyte/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. toctree:: diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst index a2dcef07fe98b..5ab48239021fb 100644 --- a/docs/apache-airflow-providers-amazon/index.rst +++ b/docs/apache-airflow-providers-amazon/index.rst @@ -40,7 +40,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-beam/index.rst b/docs/apache-airflow-providers-apache-beam/index.rst index e3e6a9ea41506..c3863a4a360c8 100644 --- a/docs/apache-airflow-providers-apache-beam/index.rst +++ b/docs/apache-airflow-providers-apache-beam/index.rst @@ -27,7 +27,7 @@ Content Python API <_api/airflow/providers/apache/beam/index> PyPI Repository - Example DAGs + Example DAGs .. toctree:: :maxdepth: 1 diff --git a/docs/apache-airflow-providers-apache-cassandra/index.rst b/docs/apache-airflow-providers-apache-cassandra/index.rst index 15f4289551e6b..cb7acb1025d2e 100644 --- a/docs/apache-airflow-providers-apache-cassandra/index.rst +++ b/docs/apache-airflow-providers-apache-cassandra/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!
diff --git a/docs/apache-airflow-providers-apache-hive/index.rst b/docs/apache-airflow-providers-apache-hive/index.rst index ec706330a03e4..89ff1e66c41ce 100644 --- a/docs/apache-airflow-providers-apache-hive/index.rst +++ b/docs/apache-airflow-providers-apache-hive/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-kylin/index.rst b/docs/apache-airflow-providers-apache-kylin/index.rst index ea50df4e21be8..76e9b52f06385 100644 --- a/docs/apache-airflow-providers-apache-kylin/index.rst +++ b/docs/apache-airflow-providers-apache-kylin/index.rst @@ -32,7 +32,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-livy/index.rst b/docs/apache-airflow-providers-apache-livy/index.rst index 6901b97be8e5a..14c6b15a94a81 100644 --- a/docs/apache-airflow-providers-apache-livy/index.rst +++ b/docs/apache-airflow-providers-apache-livy/index.rst @@ -31,7 +31,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-pig/index.rst b/docs/apache-airflow-providers-apache-pig/index.rst index 82ba47ee138fd..c9a3f2a7e4d45 100644 --- a/docs/apache-airflow-providers-apache-pig/index.rst +++ b/docs/apache-airflow-providers-apache-pig/index.rst @@ -31,7 +31,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-spark/index.rst b/docs/apache-airflow-providers-apache-spark/index.rst index 21b012260e197..32c352f62f4f5 100644 --- a/docs/apache-airflow-providers-apache-spark/index.rst +++ b/docs/apache-airflow-providers-apache-spark/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-asana/index.rst b/docs/apache-airflow-providers-asana/index.rst index 54e1385b0f849..5790c1843088b 100644 --- a/docs/apache-airflow-providers-asana/index.rst +++ b/docs/apache-airflow-providers-asana/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs .. toctree:: :maxdepth: 1 diff --git a/docs/apache-airflow-providers-cncf-kubernetes/index.rst b/docs/apache-airflow-providers-cncf-kubernetes/index.rst index 43fe198c9553c..8a52d8a8a30ca 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/index.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! 
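All of these provider `index.rst` hunks are the same mechanical rewrite: the `tree/master` segment of each GitHub link becomes `tree/main`. A hedged sketch of how such a sweep could be reproduced locally; the `git grep`/`sed` pipeline below is an assumption (GNU `sed` syntax), not something this patch contains:

```bash
# Rewrite every GitHub "tree/master" link under docs/ to "tree/main".
# -l lists matching files, -z/-0 keep the pipeline safe for odd filenames.
git grep -lz 'github.com/apache/airflow/tree/master' -- docs/ \
  | xargs -0 sed -i 's|apache/airflow/tree/master|apache/airflow/tree/main|g'
```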
diff --git a/docs/apache-airflow-providers-databricks/index.rst b/docs/apache-airflow-providers-databricks/index.rst index 503ab64222bd9..81a888b527cf1 100644 --- a/docs/apache-airflow-providers-databricks/index.rst +++ b/docs/apache-airflow-providers-databricks/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-dingding/index.rst b/docs/apache-airflow-providers-dingding/index.rst index 7b56c242e99b5..d4e14cf623ce7 100644 --- a/docs/apache-airflow-providers-dingding/index.rst +++ b/docs/apache-airflow-providers-dingding/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-docker/index.rst b/docs/apache-airflow-providers-docker/index.rst index 86d95973873d9..1978085057acf 100644 --- a/docs/apache-airflow-providers-docker/index.rst +++ b/docs/apache-airflow-providers-docker/index.rst @@ -33,7 +33,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-google/example-dags.rst b/docs/apache-airflow-providers-google/example-dags.rst index bbe5b9e47b53a..ad7f723abfe88 100644 --- a/docs/apache-airflow-providers-google/example-dags.rst +++ b/docs/apache-airflow-providers-google/example-dags.rst @@ -20,9 +20,9 @@ Example DAGS You can learn how to use Google integrations by analyzing the source code of the example DAGs: -* `Google Ads `__ -* `Google Cloud `__ -* `Google Firebase `__ -* `Google Marketing Platform `__ -* `Google Workplace `__ (formerly Google Suite) -* `Google LevelDB `__ +* `Google Ads `__ +* `Google Cloud `__ +* `Google Firebase `__ +* `Google Marketing Platform `__ +* `Google Workplace `__ (formerly Google Suite) +* `Google LevelDB `__ diff --git a/docs/apache-airflow-providers-google/operators/cloud/index.rst b/docs/apache-airflow-providers-google/operators/cloud/index.rst index b31065ad14902..5a36529237b80 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/index.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/index.rst @@ -29,4 +29,4 @@ Google Cloud Operators .. note:: You can learn how to use Google Cloud integrations by analyzing the - `source code `_ of the particular example DAGs. + `source code `_ of the particular example DAGs. diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/index.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/index.rst index b4be3d893309c..ecd8cc5d307e9 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/index.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/index.rst @@ -29,4 +29,4 @@ Google Marketing Platform Operators .. note:: You can learn how to use Google Cloud integrations by analyzing the - `source code `_ of the particular example DAGs. + `source code `_ of the particular example DAGs. 
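The Google `example-dags.rst` bullets above lost their angle-bracket link targets during extraction; only the link labels survive. Based on the usual provider layout, each bullet points at a per-service `example_dags` directory on the `main` tree; the exact paths below are assumptions, not confirmed by this patch:

```bash
# Presumed targets of the six bullets above (paths are assumptions;
# only the tree/main prefix is confirmed elsewhere in this patch).
base="https://github.com/apache/airflow/tree/main/airflow/providers/google"
for service in ads cloud firebase marketing_platform suite leveldb; do
  echo "${base}/${service}/example_dags"
done
```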
diff --git a/docs/apache-airflow-providers-http/index.rst b/docs/apache-airflow-providers-http/index.rst index 4faef8e4f0f8f..eb5a2ffb31e3c 100644 --- a/docs/apache-airflow-providers-http/index.rst +++ b/docs/apache-airflow-providers-http/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-jdbc/index.rst b/docs/apache-airflow-providers-jdbc/index.rst index be064898211fa..31e5c06121479 100644 --- a/docs/apache-airflow-providers-jdbc/index.rst +++ b/docs/apache-airflow-providers-jdbc/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-jenkins/index.rst b/docs/apache-airflow-providers-jenkins/index.rst index 349997d013ad1..933394ca5d315 100644 --- a/docs/apache-airflow-providers-jenkins/index.rst +++ b/docs/apache-airflow-providers-jenkins/index.rst @@ -32,7 +32,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-microsoft-azure/index.rst b/docs/apache-airflow-providers-microsoft-azure/index.rst index 1613bca38d0b3..10195210c11f4 100644 --- a/docs/apache-airflow-providers-microsoft-azure/index.rst +++ b/docs/apache-airflow-providers-microsoft-azure/index.rst @@ -41,7 +41,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-microsoft-winrm/index.rst b/docs/apache-airflow-providers-microsoft-winrm/index.rst index 7a388ac103cce..25e282ecd582a 100644 --- a/docs/apache-airflow-providers-microsoft-winrm/index.rst +++ b/docs/apache-airflow-providers-microsoft-winrm/index.rst @@ -32,7 +32,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-mysql/index.rst b/docs/apache-airflow-providers-mysql/index.rst index c16a424c45163..7aa93892df3c0 100644 --- a/docs/apache-airflow-providers-mysql/index.rst +++ b/docs/apache-airflow-providers-mysql/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs .. toctree:: :maxdepth: 1 diff --git a/docs/apache-airflow-providers-neo4j/index.rst b/docs/apache-airflow-providers-neo4j/index.rst index 26086a7c9ffb0..122c25146fba5 100644 --- a/docs/apache-airflow-providers-neo4j/index.rst +++ b/docs/apache-airflow-providers-neo4j/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs .. toctree:: :maxdepth: 1 diff --git a/docs/apache-airflow-providers-papermill/index.rst b/docs/apache-airflow-providers-papermill/index.rst index 86c1332a4c056..c7c20c8df8c5d 100644 --- a/docs/apache-airflow-providers-papermill/index.rst +++ b/docs/apache-airflow-providers-papermill/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. 
IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-plexus/index.rst b/docs/apache-airflow-providers-plexus/index.rst index fdc9874b46a5e..e3d2be6299f04 100644 --- a/docs/apache-airflow-providers-plexus/index.rst +++ b/docs/apache-airflow-providers-plexus/index.rst @@ -32,7 +32,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs .. toctree:: :maxdepth: 1 diff --git a/docs/apache-airflow-providers-postgres/index.rst b/docs/apache-airflow-providers-postgres/index.rst index 83c87f96a7c8d..2cc0029e66229 100644 --- a/docs/apache-airflow-providers-postgres/index.rst +++ b/docs/apache-airflow-providers-postgres/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs .. toctree:: :maxdepth: 1 diff --git a/docs/apache-airflow-providers-qubole/index.rst b/docs/apache-airflow-providers-qubole/index.rst index 8d3f5b27aba7a..9f0b8ae7be0a0 100644 --- a/docs/apache-airflow-providers-qubole/index.rst +++ b/docs/apache-airflow-providers-qubole/index.rst @@ -32,7 +32,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-salesforce/index.rst b/docs/apache-airflow-providers-salesforce/index.rst index 94809c0a450f1..5fe31c1c87ac3 100644 --- a/docs/apache-airflow-providers-salesforce/index.rst +++ b/docs/apache-airflow-providers-salesforce/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-singularity/index.rst b/docs/apache-airflow-providers-singularity/index.rst index 5842f245062f2..b2f55a0373f37 100644 --- a/docs/apache-airflow-providers-singularity/index.rst +++ b/docs/apache-airflow-providers-singularity/index.rst @@ -32,7 +32,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-snowflake/index.rst b/docs/apache-airflow-providers-snowflake/index.rst index 6413f68979231..f9af0e224b39a 100644 --- a/docs/apache-airflow-providers-snowflake/index.rst +++ b/docs/apache-airflow-providers-snowflake/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-sqlite/index.rst b/docs/apache-airflow-providers-sqlite/index.rst index 3c9f521a0ee33..2abcee9166d9e 100644 --- a/docs/apache-airflow-providers-sqlite/index.rst +++ b/docs/apache-airflow-providers-sqlite/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs .. toctree:: :maxdepth: 1 diff --git a/docs/apache-airflow-providers-tableau/index.rst b/docs/apache-airflow-providers-tableau/index.rst index 97e666fd8049a..2cb529cc06c2f 100644 --- a/docs/apache-airflow-providers-tableau/index.rst +++ b/docs/apache-airflow-providers-tableau/index.rst @@ -33,7 +33,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! 
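The `Example DAGs` and `PyPI Repository` toctree entries in these index files also lost their link targets. The expected shape of the first one is recoverable from `docs/exts/docs_build/lint_checks.py` later in this patch, which builds `Example DAGs <https://github.com/apache/airflow/tree/main/{package_rel_path}>`; the PyPI URL pattern below is an assumption from the package naming convention:

```bash
# Regenerate the two "Resources" toctree entries for one provider.
provider=telegram   # hypothetical example provider
echo "Example DAGs <https://github.com/apache/airflow/tree/main/airflow/providers/${provider}/example_dags>"
echo "PyPI Repository <https://pypi.org/project/apache-airflow-providers-${provider}/>"
```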
diff --git a/docs/apache-airflow-providers-telegram/index.rst b/docs/apache-airflow-providers-telegram/index.rst index 89d88e807c36f..fc2342303f540 100644 --- a/docs/apache-airflow-providers-telegram/index.rst +++ b/docs/apache-airflow-providers-telegram/index.rst @@ -38,7 +38,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-yandex/index.rst b/docs/apache-airflow-providers-yandex/index.rst index 4cc377a87953e..1aae282ec8902 100644 --- a/docs/apache-airflow-providers-yandex/index.rst +++ b/docs/apache-airflow-providers-yandex/index.rst @@ -39,7 +39,7 @@ Content :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository .. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-yandex/operators.rst b/docs/apache-airflow-providers-yandex/operators.rst index 30319180ac913..bf5c6819a6a06 100644 --- a/docs/apache-airflow-providers-yandex/operators.rst +++ b/docs/apache-airflow-providers-yandex/operators.rst @@ -37,4 +37,4 @@ Prerequisite Tasks Using the operators ^^^^^^^^^^^^^^^^^^^^^ -See the usage examples in `example DAGs `_ +See the usage examples in `example DAGs `_ diff --git a/docs/apache-airflow-providers/howto/create-update-providers.rst b/docs/apache-airflow-providers/howto/create-update-providers.rst index 91fb74e438d6a..e5b075df83df3 100644 --- a/docs/apache-airflow-providers/howto/create-update-providers.rst +++ b/docs/apache-airflow-providers/howto/create-update-providers.rst @@ -32,7 +32,7 @@ new provider. Another recommendation that will help you is to look for a provider that works similar to yours. That way it will help you to set up tests and other dependencies. -First, you need to set up your local development environment. See `Contribution Quick Start `_ +First, you need to set up your local development environment. See `Contribution Quick Start `_ if you did not set up your local environment yet. We recommend using ``breeze`` to develop locally. This way you easily be able to have an environment more similar to the one executed by GitHub CI workflow. @@ -124,14 +124,14 @@ Add your provider information in the following variables in ``test_providers_man Integration tests ^^^^^^^^^^^^^^^^^ -See `Airflow Integration Tests `_ +See `Airflow Integration Tests `_ Documentation ^^^^^^^^^^^^^ An important part of building a new provider is the documentation. -Some steps for documentation occurs automatically by ``pre-commit`` see `Installing pre-commit guide `_ +Some steps for documentation occurs automatically by ``pre-commit`` see `Installing pre-commit guide `_ .. code-block:: bash @@ -298,4 +298,4 @@ main Airflow documentation that involves some steps with the providers is also w How-to Update a community provider ---------------------------------- -See `Provider packages versioning `_ +See `Provider packages versioning `_ diff --git a/docs/apache-airflow-providers/index.rst b/docs/apache-airflow-providers/index.rst index 1380c8e441a8a..7329b7fd780a4 100644 --- a/docs/apache-airflow-providers/index.rst +++ b/docs/apache-airflow-providers/index.rst @@ -115,7 +115,7 @@ the package. 
We are using standard mechanism of python to define needs to define appropriate entry-point ``apache_airflow_provider`` which has to point to a callable implemented by your package and return a dictionary containing the list of discoverable capabilities of your package. The dictionary has to follow the -`json-schema specification `_. +`json-schema specification `_. Most of the schema provides extension point for the documentation (which you might want to also use for your own purpose) but the important fields from the extensibility point of view are those: @@ -149,7 +149,7 @@ they define the extensions properly. See :doc:`cli-and-env-variables-ref` for de sub-commands. When you write your own provider, consider following the -`Naming conventions for provider packages `_ +`Naming conventions for provider packages `_ FAQ for Airflow and Providers diff --git a/docs/apache-airflow-providers/operators-and-hooks-ref/google.rst b/docs/apache-airflow-providers/operators-and-hooks-ref/google.rst index b3435fe266ba7..4c67c5220811b 100644 --- a/docs/apache-airflow-providers/operators-and-hooks-ref/google.rst +++ b/docs/apache-airflow-providers/operators-and-hooks-ref/google.rst @@ -38,7 +38,7 @@ Airflow has extensive support for the `Google Cloud ` .. note:: You can learn how to use Google Cloud integrations by analyzing the `source code of the Google Cloud example DAGs - `_ + `_ Service operators and hooks @@ -66,7 +66,7 @@ Google Marketing Platform .. note:: You can learn how to use Google Marketing Platform integrations by analyzing the - `source code `_ + `source code `_ of the example DAGs. diff --git a/docs/apache-airflow/backport-providers.rst b/docs/apache-airflow/backport-providers.rst deleted file mode 100644 index 493d7f61dfb0e..0000000000000 --- a/docs/apache-airflow/backport-providers.rst +++ /dev/null @@ -1,115 +0,0 @@ - .. Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - .. http://www.apache.org/licenses/LICENSE-2.0 - - .. Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. - - -Backport Providers ------------------- - -.. warning:: - - We have stopped releasing the backport providers on March 17, 2021. Since then, no new changes to - providers for Airflow 2.0 are going to be released as backport packages. - It's the highest time to upgrade to Airflow 2.0. - - -Context: Airflow 2.0 operators, hooks, and secrets -'''''''''''''''''''''''''''''''''''''''''''''''''' - -We already have a lot of changes in the operators, transfers, hooks, sensors, secrets for many external systems, but -they are not used nor tested widely because they are part of the **master/2.0** release. - -As a part of Airflow 2.0, following AIP-21 "change in import paths" all the non-core interfaces to external systems of -Apache Airflow have been moved to the ``airflow.providers`` package. 
- -Thanks to that, the operators from Airflow 2.0 can be used in Airflow 1.10 as separately installable packages -with the constraint that those packages can only be used in Python 3.6+ environment. - -Installing Airflow 2.0 operators in Airflow 1.10 -'''''''''''''''''''''''''''''''''''''''''''''''' - -We released Backport provider packages that can be installed for older Airflow versions. These packages will be -released more frequently compared to the main Airflow 1.10.* releases. - -You will not have to upgrade your Airflow version to use those packages. You can find those packages on -`PyPI `_ -and install them separately for each provider. - -These packages are available now and can be used in the latest Airflow 1.10.* version. Most of those packages are -also installable and usable in most Airflow 1.10.* releases but there is no extensive testing done beyond the -latest released version, so you might expect more problems in earlier Airflow versions. - -An easier migration path to 2.0 -''''''''''''''''''''''''''''''' - -With backported providers package users can migrate their DAGs to the new providers package incrementally and once -they convert to the new operators/sensors/hooks they can seamlessly migrate their environments to Airflow 2.0. -The nice thing about providers backport packages is that you can use both old and new classes at the same time, -even in the same DAG. So your migration can be gradual and smooth. - -Note that in Airflow 2.0 old classes raise deprecation warning and redirect to the new classes wherever it is possible. -In some rare cases the new operators will not be fully backwards compatible, you will find information -about those cases in `UPDATING.md `_ where we -explained all such cases. - -Switching early to the Airflow 2.0 operators while still running Airflow 1.10.x will make your -migration much easier. - -Note that as of 17 March 2021 the backport providers are not released any more, so you might find additional -differences accumulating over time in the newer versions of the providers. - -Installing backport packages -''''''''''''''''''''''''''''' - -Note that the backport packages might require extra dependencies. ``pip`` installs the required dependencies -automatically when it installs the backport package but there are sometimes cross-dependencies between -the backport packages. For example ``google`` package has cross-dependency with ``amazon`` package to allow -transfers between those two cloud providers. You might need to install those packages in case you -use cross-dependent packages. The easiest way to install them is to use "extras" when installing the -package, for example the below will install both ``google`` and ``amazon`` backport packages: - -.. code-block:: bash - - pip install apache-airflow-backport-providers-google[amazon] - -This is all documented in the PyPI description of the packages as well as in the README.md file available -for each provider package. For example for ``google`` package you can find the readme in -`README.md `_. - -You will also find there the summary of both - new classes and moved classes as well as requirement information. - -Troubleshooting installing backport packages -'''''''''''''''''''''''''''''''''''''''''''' - -Backport providers only work when they are installed in the same namespace as the 'apache-airflow' 1.10 package. -This is majority of cases when you simply run pip install - it installs all packages in the same folder -(usually in ``/usr/local/lib/pythonX.Y/site-packages``). 
But when you install the ``apache-airflow`` and -``apache-airflow-backport-package-*`` using different methods (for example using ``pip install -e .`` or -``pip install --user``) they might be installed in different namespaces. - -If that's the case, the provider packages will not be importable (the error in such case is -``ModuleNotFoundError: No module named 'airflow.providers'``). - -If you experience the problem, you can easily fix it by creating symbolic link in your -installed "airflow" folder to the "providers" folder where you installed your backport packages. - -If you installed it with -e, this link should be created in your airflow sources, -if you installed it with the ``--user`` flag it should be from the ``~/.local/lib/pythonX.Y/site-packages/airflow/`` -folder. - -.. spelling:: - - backported diff --git a/docs/apache-airflow/dag-run.rst b/docs/apache-airflow/dag-run.rst index 07529904617b6..87ccc2934db02 100644 --- a/docs/apache-airflow/dag-run.rst +++ b/docs/apache-airflow/dag-run.rst @@ -88,7 +88,7 @@ in the configuration file. When turned off, the scheduler creates a DAG run only """ Code that goes along with the Airflow tutorial located at: - https://github.com/apache/airflow/blob/master/airflow/example_dags/tutorial.py + https://github.com/apache/airflow/blob/main/airflow/example_dags/tutorial.py """ from airflow.models.dag import DAG from airflow.operators.bash import BashOperator diff --git a/docs/apache-airflow/deprecated-rest-api-ref.rst b/docs/apache-airflow/deprecated-rest-api-ref.rst index 11ec8a7dd0b2b..877385b5af3af 100644 --- a/docs/apache-airflow/deprecated-rest-api-ref.rst +++ b/docs/apache-airflow/deprecated-rest-api-ref.rst @@ -21,7 +21,7 @@ Deprecated REST API .. warning:: This REST API is deprecated since version 2.0. Please consider using the :doc:`stable REST API `. - For more information on migration, see `UPDATING.md `_ + For more information on migration, see `UPDATING.md `_ Before Airflow 2.0 this REST API was known as the "experimental" API, but now that the :doc:`stable REST API ` is available, it has been renamed. diff --git a/docs/apache-airflow/installation.rst b/docs/apache-airflow/installation.rst index 5f498931f862f..8d1624ca879c0 100644 --- a/docs/apache-airflow/installation.rst +++ b/docs/apache-airflow/installation.rst @@ -157,7 +157,7 @@ not work or will produce unusable Airflow installation. In order to have repeatable installation, starting from **Airflow 1.10.10** and updated in **Airflow 1.10.13** we also keep a set of "known-to-be-working" constraint files in the -``constraints-master``, ``constraints-2-0`` orphan branches and then we create tag +``constraints-main``, ``constraints-2-0`` orphan branches and then we create tag for each released version e.g. :subst-code:`constraints-|version|`. This way, when we keep a tested and working set of dependencies. Those "known-to-be-working" constraints are per major/minor Python version. You can use them as constraint @@ -172,7 +172,7 @@ You can create the URL to the file substituting the variables in the template be where: -- ``AIRFLOW_VERSION`` - Airflow version (e.g. :subst-code:`|version|`) or ``master``, ``2-0``, for latest development version +- ``AIRFLOW_VERSION`` - Airflow version (e.g. :subst-code:`|version|`) or ``main``, ``2-0``, for latest development version - ``PYTHON_VERSION`` Python version e.g. ``3.8``, ``3.7`` There is also a no-providers constraint file, which contains just constraints required to install Airflow core. This allows @@ -227,7 +227,7 @@ being installed. 
You can manually install all the providers you need. You can continue using the "providers" constraint files but the 'versioned' airflow constraints installs only the versions of providers that were available in PyPI at -the time of preparing of the airflow version. However, usually you can use "master" version of the providers +the time of preparing of the airflow version. However, usually you can use "main" version of the providers to install latest version of providers. Usually the providers work with most versions of Airflow, if there will be any incompatibilities, it will be captured as package dependencies. @@ -235,16 +235,16 @@ will be any incompatibilities, it will be captured as package dependencies. PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)" # For example: 3.6 - CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-${PYTHON_VERSION}.txt" + CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_VERSION}.txt" pip install "apache-airflow-providers-google" --constraint "${CONSTRAINT_URL}" -You can also upgrade the providers to latest versions (you need to use master version of constraints for that): +You can also upgrade the providers to latest versions (you need to use main version of constraints for that): .. code-block:: bash PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)" # For example: 3.6 - CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-${PYTHON_VERSION}.txt" + CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_VERSION}.txt" pip install "apache-airflow-providers-google" --upgrade --constraint "${CONSTRAINT_URL}" @@ -272,9 +272,9 @@ They are based on the official release schedule of Python and Kubernetes, nicely `Kubernetes version skew policy `_. 1. We drop support for Python and Kubernetes versions when they reach EOL. We drop support for those - EOL versions in master right after EOL date, and it is effectively removed when we release the + EOL versions in main right after EOL date, and it is effectively removed when we release the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow - For example for Python 3.6 it means that we drop support in master right after 23.12.2021, and the first + For example for Python 3.6 it means that we drop support in main right after 23.12.2021, and the first MAJOR or MINOR version of Airflow released after will not have it. 2. The "oldest" supported version of Python/Kubernetes is the default one. "Default" is only meaningful @@ -283,7 +283,7 @@ They are based on the official release schedule of Python and Kubernetes, nicely are both Python 3.6 images, however the first MINOR/MAJOR release of Airflow release after 23.12.2021 will become Python 3.7 images. -3. We support a new version of Python/Kubernetes in master after they are officially released, as soon as we +3. We support a new version of Python/Kubernetes in main after they are officially released, as soon as we make them work in our CI pipeline (which might not be immediate due to dependencies catching up with new versions of Python mostly) we release a new images/support in Airflow based on the working CI setup. 
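The installation chunk above explains the constraint-URL template and its two variables. Putting them together for a released version (the version number is illustrative; the `PYTHON_VERSION` line is taken verbatim from the docs above):

```bash
# Construct the constraints URL from AIRFLOW_VERSION and PYTHON_VERSION,
# then install with the pinned dependency set.
AIRFLOW_VERSION=2.1.0   # or "main" / "2-0" for the development constraints
PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)"
CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt"
pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}"
```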
diff --git a/docs/docker-stack/build-arg-ref.rst b/docs/docker-stack/build-arg-ref.rst
index 5af4cf7876ffc..8780970f613c5 100644
--- a/docs/docker-stack/build-arg-ref.rst
+++ b/docs/docker-stack/build-arg-ref.rst
@@ -58,7 +58,7 @@ Those are the most common arguments that you use when you want to build a custom
+------------------------------------------+------------------------------------------+------------------------------------------+
| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | | Reference (branch or tag) from GitHub |
| | | where constraints file is taken from |
-| | | It can be ``constraints-master`` or |
+| | | It can be ``constraints-main`` or |
| | | ``constraints-2-0`` for |
| | | 2.0.* installation. In case of building |
| | | specific version you want to point it |
@@ -222,7 +222,7 @@ docker context files.
+------------------------------------------+------------------------------------------+------------------------------------------+
| Build argument | Default value | Description |
+==========================================+==========================================+==========================================+
-| ``AIRFLOW_BRANCH`` | ``master`` | the branch from which PIP dependencies |
+| ``AIRFLOW_BRANCH`` | ``main`` | the branch from which PIP dependencies |
| | | are pre-installed initially. |
+------------------------------------------+------------------------------------------+------------------------------------------+
| ``AIRFLOW_REPO`` | ``apache/airflow`` | the repository from which PIP |
diff --git a/docs/docker-stack/build.rst b/docs/docker-stack/build.rst
index 354c88da79bb4..3a5a977cfa758 100644
--- a/docs/docker-stack/build.rst
+++ b/docs/docker-stack/build.rst
@@ -342,12 +342,12 @@ it to your forked version of source code without having to release it to PyPI. I
a branch or tag in your repository and use the tag or branch in the URL that you point the installation to.

In case of GitHub builds you need to pass the constraints reference manually if you want to use
-specific constraints, otherwise the default ``constraints-master`` is used.
+specific constraints, otherwise the default ``constraints-main`` is used.

-The following example builds the production image in version ``3.7`` with default extras from the latest master version and
-constraints are taken from latest version of the constraints-master branch in GitHub.
+The following example builds the production image in version ``3.7`` with default extras from the latest main version and
+constraints are taken from the latest version of the constraints-main branch in GitHub.

-.. exampleinclude:: docker-examples/customizing/github-master.sh
+.. exampleinclude:: docker-examples/customizing/github-main.sh
    :language: bash
    :start-after: [START build]
    :end-before: [END build]
@@ -356,7 +356,7 @@ The following example builds the production image with default extras from the
latest ``v2-*-test`` version and constraints are taken from the latest version of the ``constraints-2-*``
branch in GitHub (for example ``v2-1-test`` branch matches ``constraints-2-1``). Note that
this command might fail occasionally as only the "released version" constraints when building a
-version and "master" constraints when building master are guaranteed to work.
+version and "main" constraints when building main are guaranteed to work.

.. exampleinclude:: docker-examples/customizing/github-v2-1-test.sh
    :language: bash
@@ -541,4 +541,4 @@ The architecture of the images
..............................
You can read more details about the images - the context, their parameters and internal structure in the -`IMAGES.rst `_ document. +`IMAGES.rst `_ document. diff --git a/docs/docker-stack/docker-examples/customizing/github-different-repository.sh b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh index b980b5b7c0e88..35e685e3ad19e 100755 --- a/docs/docker-stack/docker-examples/customizing/github-different-repository.sh +++ b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh @@ -23,8 +23,8 @@ cd "${AIRFLOW_SOURCES}" # [START build] docker build . \ --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \ - --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/potiuk/airflow/archive/master.tar.gz#egg=apache-airflow" \ - --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master" \ + --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/potiuk/airflow/archive/main.tar.gz#egg=apache-airflow" \ + --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \ --build-arg CONSTRAINTS_GITHUB_REPOSITORY="potiuk/airflow" \ --tag "$(basename "$0")" # [END build] diff --git a/docs/docker-stack/docker-examples/customizing/github-master.sh b/docs/docker-stack/docker-examples/customizing/github-master.sh index 4237e91e6ff56..3ce40acaf8775 100755 --- a/docs/docker-stack/docker-examples/customizing/github-master.sh +++ b/docs/docker-stack/docker-examples/customizing/github-master.sh @@ -24,8 +24,8 @@ cd "${AIRFLOW_SOURCES}" # [START build] docker build . \ --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ - --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/master.tar.gz#egg=apache-airflow" \ - --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master" \ + --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/main.tar.gz#egg=apache-airflow" \ + --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \ --tag "$(basename "$0")" # [END build] docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst index 547518da6c810..48a774d217cfb 100644 --- a/docs/docker-stack/entrypoint.rst +++ b/docs/docker-stack/entrypoint.rst @@ -171,7 +171,7 @@ database and creating an ``admin/admin`` Admin user with the following command: --env "_AIRFLOW_DB_UPGRADE=true" \ --env "_AIRFLOW_WWW_USER_CREATE=true" \ --env "_AIRFLOW_WWW_USER_PASSWORD=admin" \ - apache/airflow:master-python3.8 webserver + apache/airflow:main-python3.8 webserver .. code-block:: bash @@ -180,7 +180,7 @@ database and creating an ``admin/admin`` Admin user with the following command: --env "_AIRFLOW_DB_UPGRADE=true" \ --env "_AIRFLOW_WWW_USER_CREATE=true" \ --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \ - apache/airflow:master-python3.8 webserver + apache/airflow:main-python3.8 webserver The commands above perform initialization of the SQLite database, create admin user with admin password and Admin role. They also forward local port ``8080`` to the webserver port and finally start the webserver. diff --git a/docs/docker-stack/index.rst b/docs/docker-stack/index.rst index 6da9f73ce3ff0..6ee77d03675aa 100644 --- a/docs/docker-stack/index.rst +++ b/docs/docker-stack/index.rst @@ -50,7 +50,7 @@ you want to either extend or customize the image. You can see all possible extra The production images are build in DockerHub from released version and release candidates. 
There are also images published from branches but they are used mainly for development and testing purposes.
-See `Airflow Git Branching <https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#airflow-git-branches>`_
+See `Airflow Git Branching <https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#airflow-git-branches>`_
for details.
diff --git a/docs/exts/docs_build/dev_index_template.html.jinja2 b/docs/exts/docs_build/dev_index_template.html.jinja2
index e6d1d09c10aec..e1b9e9bcf6d39 100644
--- a/docs/exts/docs_build/dev_index_template.html.jinja2
+++ b/docs/exts/docs_build/dev_index_template.html.jinja2
@@ -45,7 +45,7 @@

Apache Airflow Documentation

-        Apache Airflow - logo  <!-- <img> tag lost in extraction; its src URL pointed at the master branch -->
+        Apache Airflow - logo  <!-- same <img> tag; its src URL now points at the main branch -->
diff --git a/docs/exts/docs_build/lint_checks.py b/docs/exts/docs_build/lint_checks.py index 5fd996d544b4c..7b4e7b0a7c047 100644 --- a/docs/exts/docs_build/lint_checks.py +++ b/docs/exts/docs_build/lint_checks.py @@ -276,7 +276,7 @@ def check_example_dags_in_provider_tocs() -> List[DocBuildError]: if len(example_dags_dirs) == 1: package_rel_path = os.path.relpath(example_dags_dirs[0], start=ROOT_PROJECT_DIR) - github_url = f"https://github.com/apache/airflow/tree/master/{package_rel_path}" + github_url = f"https://github.com/apache/airflow/tree/main/{package_rel_path}" expected_text = f"Example DAGs <{github_url}>" else: expected_text = "Example DAGs " diff --git a/docs/helm-chart/airflow-configuration.rst b/docs/helm-chart/airflow-configuration.rst index 24fe45f913db5..2225db3fbc1c6 100644 --- a/docs/helm-chart/airflow-configuration.rst +++ b/docs/helm-chart/airflow-configuration.rst @@ -20,7 +20,7 @@ Configuring Airflow All Airflow configuration parameters (equivalent of ``airflow.cfg``) are stored in -`values.yaml `__ +`values.yaml `__ under the ``config`` key . The following code demonstrates how one would allow webserver users to view the config from within the webserver application. See the bottom line of the example: diff --git a/scripts/ci/constraints/ci_branch_constraints.sh b/scripts/ci/constraints/ci_branch_constraints.sh index 35dfa07b92890..0917ac77bd174 100755 --- a/scripts/ci/constraints/ci_branch_constraints.sh +++ b/scripts/ci/constraints/ci_branch_constraints.sh @@ -20,8 +20,8 @@ if [[ ${GITHUB_REF} == 'refs/heads/main' ]]; then echo "::set-output name=branch::constraints-main" -elif [[ ${GITHUB_REF} == 'refs/heads/master' ]]; then - echo "::set-output name=branch::constraints-master" +elif [[ ${GITHUB_REF} == 'refs/heads/main' ]]; then + echo "::set-output name=branch::constraints-main" elif [[ ${GITHUB_REF} == 'refs/heads/v2-0-test' ]]; then echo "::set-output name=branch::constraints-2-0" elif [[ ${GITHUB_REF} == 'refs/heads/v2-1-test' ]]; then diff --git a/scripts/ci/constraints/ci_commit_constraints.sh b/scripts/ci/constraints/ci_commit_constraints.sh index 7eda70f9d4d36..58afbd94ab795 100755 --- a/scripts/ci/constraints/ci_commit_constraints.sh +++ b/scripts/ci/constraints/ci_commit_constraints.sh @@ -30,5 +30,5 @@ with commit sha ${COMMIT_SHA}. All tests passed in this build so we determined we can push the updated constraints. -See https://github.com/apache/airflow/blob/master/README.md#installing-from-pypi for details. +See https://github.com/apache/airflow/blob/main/README.md#installing-from-pypi for details. " diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/images/ci_build_dockerhub.sh index e1279bce4e163..b838f36d445ee 100755 --- a/scripts/ci/images/ci_build_dockerhub.sh +++ b/scripts/ci/images/ci_build_dockerhub.sh @@ -56,7 +56,7 @@ if [[ ! "${DOCKER_TAG}" =~ ^[0-9].* ]]; then # is built from non-release tag. If this is not set, then building images from locally build # packages fails, because the packages with non-dev version are skipped (as they are already released) export VERSION_SUFFIX_FOR_PYPI=".dev0" - # Only build and push CI image for the nightly-master, v2-*-test branches + # Only build and push CI image for the nightly-main, v2-*-test branches # for tagged releases we build everything from PyPI, so we do not need CI images # For development images, we have to build all packages from current sources because we want to produce # `Latest and greatest` image from those branches. 
We need to build and push CI image as well as PROD diff --git a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh index ed76b482fb2f4..415f7a5ad36da 100755 --- a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh +++ b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh @@ -38,7 +38,7 @@ function build_ci_image_on_ci() { if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then python_tag_suffix="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}" fi - # first we pull base python image. We will need it to re-push it after master build + # first we pull base python image. We will need it to re-push it after main build # Becoming the new "latest" image for other builds build_images::wait_for_image_tag "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}" \ "${python_tag_suffix}" "${AIRFLOW_PYTHON_BASE_IMAGE}" diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh index 9b088e1486f72..fe8b489be4a05 100755 --- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh +++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh @@ -42,7 +42,7 @@ function build_prod_images_on_ci() { fi if [[ "${WAIT_FOR_PYTHON_BASE_IMAGE=}" == "true" ]]; then - # first we pull base python image. We will need it to re-push it after master build + # first we pull base python image. We will need it to re-push it after main build # Becoming the new "latest" image for other builds build_images::wait_for_image_tag "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}" \ "${python_tag_suffix}" "${AIRFLOW_PYTHON_BASE_IMAGE}" diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh index 63a8e081ff134..3da10d69d02d2 100644 --- a/scripts/ci/libraries/_build_images.sh +++ b/scripts/ci/libraries/_build_images.sh @@ -401,13 +401,13 @@ function build_images::get_docker_image_names() { fi # Example: - # docker.pkg.github.com/apache/airflow/master-python3.6-v2 + # docker.pkg.github.com/apache/airflow/main-python3.6-v2 # ghcr.io/apache/airflow-v2-1-test-python-v2:3.6-slim-buster # ghcr.io/apache/airflow-python-v2:3.6-slim-buster- export GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE="${image_name}${image_separator}${AIRFLOW_PROD_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}" # Example: - # docker.pkg.github.com/apache/airflow/master-python3.6-build-v2 - # ghcr.io/apache/airflow-master-python3.6-build-v2 + # docker.pkg.github.com/apache/airflow/main-python3.6-build-v2 + # ghcr.io/apache/airflow-main-python3.6-build-v2 export GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE="${image_name}${image_separator}${AIRFLOW_PROD_BASE_TAG}-build${GITHUB_REGISTRY_IMAGE_SUFFIX}" # Example: @@ -417,7 +417,7 @@ function build_images::get_docker_image_names() { export GITHUB_REGISTRY_PYTHON_BASE_IMAGE="${image_name}${image_separator}python${GITHUB_REGISTRY_IMAGE_SUFFIX}:${PYTHON_BASE_IMAGE_VERSION}-slim-buster" # Example: - # docker.pkg.github.com/apache/airflow/master-python3.8-ci-v2 + # docker.pkg.github.com/apache/airflow/main-python3.8-ci-v2 export GITHUB_REGISTRY_AIRFLOW_CI_IMAGE="${image_name}${image_separator}${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}" } @@ -1057,13 +1057,13 @@ in the image. It can mean one of those: -1) The master is currently broken (other PRs will fail with the same error) +1) The main is currently broken (other PRs will fail with the same error) 2) You changed some dependencies in setup.py or setup.cfg and they are conflicting. In case 1) - apologies for the trouble.Please let committers know and they will fix it. 
You might -be asked to rebase to the latest master after the problem is fixed. +be asked to rebase to the latest main after the problem is fixed. In case 2) - Follow the steps below: @@ -1089,14 +1089,14 @@ CI image: ${COLOR_BLUE} ./breeze build-image --upgrade-to-newer-dependencies --python 3.6 --continue-on-pip-check-failure - docker run -it apache/airflow:master-3.6-ci bash + docker run -it apache/airflow:main-3.6-ci bash ${COLOR_RESET} Production image: ${COLOR_BLUE} ./breeze build-image --production-image --upgrade-to-newer-dependencies --python 3.6 --continue-on-pip-check-failure - docker run -it apache/airflow:master-3.6 bash + docker run -it apache/airflow:main-3.6 bash ${COLOR_RESET} * You will see error messages there telling which requirements are conflicting and which packages caused the diff --git a/scripts/ci/libraries/_docker_engine_resources.sh b/scripts/ci/libraries/_docker_engine_resources.sh index 04333591481e3..5cfa1b79302e7 100644 --- a/scripts/ci/libraries/_docker_engine_resources.sh +++ b/scripts/ci/libraries/_docker_engine_resources.sh @@ -74,7 +74,7 @@ function docker_engine_resources::check_enough_resources() { if [[ ${successful_resource_check} != "true" ]];then echo - echo "${COLOR_RED}Please check https://github.com/apache/airflow/blob/master/BREEZE.rst#resources-required for details${COLOR_RESET}" + echo "${COLOR_RED}Please check https://github.com/apache/airflow/blob/main/BREEZE.rst#resources-required for details${COLOR_RESET}" echo fi } diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh index 10089879fa299..2afdc7d5b2496 100644 --- a/scripts/ci/libraries/_initialization.sh +++ b/scripts/ci/libraries/_initialization.sh @@ -274,7 +274,7 @@ function initialization::initialize_force_variables() { export FORCE_PULL_IMAGES=${FORCE_PULL_IMAGES:="false"} # By default we do not pull python base image. 
We should do that only when we run upgrade check in - # CI master and when we manually refresh the images to latest versions + # CI main and when we manually refresh the images to latest versions export FORCE_PULL_BASE_PYTHON_IMAGE="false" # Determines whether to force build without checking if it is needed @@ -615,7 +615,7 @@ function initialization::initialize_common_environment() { } function initialization::set_default_python_version_if_empty() { - # default version of python used to tag the "master" and "latest" images in DockerHub + # default version of python used to tag the "main" and "latest" images in DockerHub export DEFAULT_PYTHON_MAJOR_MINOR_VERSION=3.6 # default python Major/Minor version @@ -752,20 +752,20 @@ function initialization::get_environment_for_builds_on_ci() { if [[ ${CI:=} == "true" ]]; then export GITHUB_REPOSITORY="${GITHUB_REPOSITORY="apache/airflow"}" export CI_TARGET_REPO="${GITHUB_REPOSITORY}" - export CI_TARGET_BRANCH="${GITHUB_BASE_REF:="master"}" + export CI_TARGET_BRANCH="${GITHUB_BASE_REF:="main"}" export CI_BUILD_ID="${GITHUB_RUN_ID="0"}" export CI_JOB_ID="${GITHUB_JOB="0"}" export CI_EVENT_TYPE="${GITHUB_EVENT_NAME="pull_request"}" - export CI_REF="${GITHUB_REF:="refs/head/master"}" + export CI_REF="${GITHUB_REF:="refs/head/main"}" else # CI PR settings export GITHUB_REPOSITORY="${GITHUB_REPOSITORY="apache/airflow"}" export CI_TARGET_REPO="${CI_TARGET_REPO="apache/airflow"}" - export CI_TARGET_BRANCH="${DEFAULT_BRANCH="master"}" + export CI_TARGET_BRANCH="${DEFAULT_BRANCH="main"}" export CI_BUILD_ID="${CI_BUILD_ID="0"}" export CI_JOB_ID="${CI_JOB_ID="0"}" export CI_EVENT_TYPE="${CI_EVENT_TYPE="pull_request"}" - export CI_REF="${CI_REF="refs/head/master"}" + export CI_REF="${CI_REF="refs/head/main"}" fi if [[ -z "${LIBRARY_PATH:-}" && -n "${LD_LIBRARY_PATH:-}" ]]; then diff --git a/scripts/ci/libraries/_sanity_checks.sh b/scripts/ci/libraries/_sanity_checks.sh index f3cc5d1e511b8..a78e0dd910f10 100644 --- a/scripts/ci/libraries/_sanity_checks.sh +++ b/scripts/ci/libraries/_sanity_checks.sh @@ -130,7 +130,7 @@ function sanity_checks::assert_not_in_container() { You should only run this script from the host. Learn more about how we develop and test airflow at: -https://github.com/apache/airflow/blob/master/TESTING.rst +https://github.com/apache/airflow/blob/main/TESTING.rst """ exit 1 diff --git a/scripts/ci/openapi/client_codegen_diff.sh b/scripts/ci/openapi/client_codegen_diff.sh index 8e62ab3a97530..c84adf43859cb 100755 --- a/scripts/ci/openapi/client_codegen_diff.sh +++ b/scripts/ci/openapi/client_codegen_diff.sh @@ -20,7 +20,7 @@ set -eu -# HEAD^1 says the "first" parent. For PR merge commits, or master commits, this is the "right" commit. +# HEAD^1 says the "first" parent. For PR merge commits, or main commits, this is the "right" commit. # # In this example, 9c532b6 is the PR commit (HEAD^2), 4840892 is the head GitHub checks-out for us, and db121f7 is the # "merge target" (HEAD^1) -- i.e. 
mainline diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh index 7e77eddec8c7c..914dfaab09e0a 100755 --- a/scripts/ci/selective_ci_checks.sh +++ b/scripts/ci/selective_ci_checks.sh @@ -209,10 +209,10 @@ function needs_ui_tests() { initialization::ga_output run-ui-tests "${@}" } -if [[ ${DEFAULT_BRANCH} == "master" ]]; then +if [[ ${DEFAULT_BRANCH} == "main" ]]; then ALL_TESTS="Always API Core Other CLI Providers WWW Integration" else - # Skips Provider tests in case current default branch is not master + # Skips Provider tests in case current default branch is not main ALL_TESTS="Always API Core Other CLI WWW Integration" fi readonly ALL_TESTS @@ -637,7 +637,7 @@ function calculate_test_types_to_run() { kubernetes_tests_needed="true" fi - if [[ ${DEFAULT_BRANCH} == "master" ]]; then + if [[ ${DEFAULT_BRANCH} == "main" ]]; then if [[ ${COUNT_PROVIDERS_CHANGED_FILES} != "0" ]]; then echo echo "Adding Providers to selected files as ${COUNT_PROVIDERS_CHANGED_FILES} Provider files changed" @@ -646,7 +646,7 @@ function calculate_test_types_to_run() { fi else echo - echo "Providers tests are not added because they are only run in case of master branch." + echo "Providers tests are not added because they are only run in case of main branch." echo fi if [[ ${COUNT_WWW_CHANGED_FILES} != "0" ]]; then diff --git a/scripts/docker/install_airflow_from_branch_tip.sh b/scripts/docker/install_airflow_from_branch_tip.sh index 83447e2cac290..925a872fa50ea 100755 --- a/scripts/docker/install_airflow_from_branch_tip.sh +++ b/scripts/docker/install_airflow_from_branch_tip.sh @@ -21,7 +21,7 @@ # to reinstall all dependencies from scratch when setup.py changes. Problem with Docker caching is that # when a file is changed, when added to docker context, it invalidates the cache and it causes Docker # build to reinstall all dependencies from scratch. This can take a loooooot of time. Therefore we install -# the dependencies first from master (and uninstall airflow right after) so that we can start installing +# the dependencies first from main (and uninstall airflow right after) so that we can start installing # deps from those pre-installed dependencies. It saves few minutes of build time when setup.py changes. # # If INSTALL_MYSQL_CLIENT is set to false, mysql extra is removed diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh index 3dbb1d596731d..fdb2d18d1a55d 100644 --- a/scripts/in_container/_in_container_utils.sh +++ b/scripts/in_container/_in_container_utils.sh @@ -47,7 +47,7 @@ function assert_in_container() { echo echo "You should only run this script in the Airflow docker container as it may override your files." 
echo "Learn more about how we develop and test airflow in:" - echo "https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst" + echo "https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst" echo exit 1 fi diff --git a/scripts/in_container/update_quarantined_test_status.py b/scripts/in_container/update_quarantined_test_status.py index 73cf962904c58..06b7831163f16 100755 --- a/scripts/in_container/update_quarantined_test_status.py +++ b/scripts/in_container/update_quarantined_test_status.py @@ -68,7 +68,7 @@ class TestHistory(NamedTuple): def get_url(result: TestResult) -> str: return ( f"[{result.name}](https://github.com/{user}/{repo}/blob/" - f"master/{result.file}?test_id={result.test_id}#L{result.line})" + f"main/{result.file}?test_id={result.test_id}#L{result.line})" ) diff --git a/tests/conftest.py b/tests/conftest.py index bbf617d65ed1a..a5af5baf71093 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -353,7 +353,7 @@ def skip_if_credential_file_missing(item): def skip_if_airflow_2_test(item): for _ in item.iter_markers(name="airflow_2"): if os.environ.get("RUN_AIRFLOW_1_10") == "true": - pytest.skip("The test works only with Airflow 2.0 / master branch") + pytest.skip("The test works only with Airflow 2.0 / main branch") def pytest_runtest_setup(item): diff --git a/tests/providers/google/cloud/operators/test_cloud_build.py b/tests/providers/google/cloud/operators/test_cloud_build.py index 3d0e0f009bd9c..55c413ccee0e1 100644 --- a/tests/providers/google/cloud/operators/test_cloud_build.py +++ b/tests/providers/google/cloud/operators/test_cloud_build.py @@ -49,7 +49,7 @@ def test_verify_source(self): [ ( "https://source.developers.google.com/p/airflow-project/r/airflow-repo", - {"projectId": "airflow-project", "repoName": "airflow-repo", "branchName": "master"}, + {"projectId": "airflow-project", "repoName": "airflow-repo", "branchName": "main"}, ), ( "https://source.developers.google.com/p/airflow-project/r/airflow-repo#branch-name", @@ -192,7 +192,7 @@ def test_repo_source_replace(self, hook_mock): "repoSource": { "projectId": "airflow-project", "repoName": "airflow-repo", - "branchName": "master", + "branchName": "main", } }, # [END howto_operator_gcp_cloud_build_source_repo_dict] diff --git a/tests/providers/google/cloud/operators/test_cloud_build_system_helper.py b/tests/providers/google/cloud/operators/test_cloud_build_system_helper.py index 5a1c8499040e3..c70db9b019ced 100755 --- a/tests/providers/google/cloud/operators/test_cloud_build_system_helper.py +++ b/tests/providers/google/cloud/operators/test_cloud_build_system_helper.py @@ -79,7 +79,7 @@ def create_repository_and_bucket(self): GCP_PROJECT_ID, GCP_REPOSITORY_NAME ) self.execute_cmd(["git", "remote", "add", "origin", repo_url], cwd=tmp_dir) - self.execute_cmd(["git", "push", "--force", "origin", "master"], cwd=tmp_dir) + self.execute_cmd(["git", "push", "--force", "origin", "main"], cwd=tmp_dir) def delete_repo(self): """Delete repository in Google Cloud Source Repository service""" diff --git a/tests/test_utils/perf/scheduler_dag_execution_timing.py b/tests/test_utils/perf/scheduler_dag_execution_timing.py index 680b2ead045ec..a53ceceb6753a 100755 --- a/tests/test_utils/perf/scheduler_dag_execution_timing.py +++ b/tests/test_utils/perf/scheduler_dag_execution_timing.py @@ -224,7 +224,7 @@ def main(num_runs, repeat, pre_create_dag_runs, executor_class, dag_ids): """ # Turn on unit test mode so that we don't do any sleep() in the scheduler - # loop - not needed on master, but this script can run 
against older + # loop - not needed on main, but this script can run against older # releases too! os.environ['AIRFLOW__CORE__UNIT_TEST_MODE'] = 'True'
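Not part of this patch, but the natural follow-up for anyone with an existing clone once the rename lands: realign the local repository with the new default branch. These are standard git commands, assuming a remote named `origin`:

```bash
# Realign an existing clone after the master -> main rename.
git branch -m master main        # rename the local branch
git fetch origin                 # learn about the new remote branch
git branch -u origin/main main   # track origin/main
git remote set-head origin -a    # refresh origin/HEAD to the new default
```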