use explicit --mount with types of mounts rather than --volume flags #23982

Merged · 1 commit · May 28, 2022
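For context, the older -v/--volume shorthand packs host path, container path, and options into one colon-separated string, and it silently creates a missing host path as a root-owned directory. The explicit --mount syntax names the mount type (type=bind, type=volume, type=tmpfs) and refuses to start the container when a bind source does not exist, which is why the changes below also add existence checks and pre-create directories such as .kube. A minimal, illustrative contrast of the two styles (the host path is made up, not taken from the Airflow sources):

# before: one colon-separated -v argument
src, dst = "/home/user/airflow/dags", "/opt/airflow/dags"
volume_style = ["-v", f"{src}:{dst}:cached"]

# after: explicit --mount with a named mount type
mount_style = ["--mount", f"type=bind,src={src},dst={dst}"]

print(volume_style)
print(mount_style)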
2 changes: 0 additions & 2 deletions breeze-legacy
@@ -2595,8 +2595,6 @@ breeze::check_and_save_all_params

initialization::make_constants_read_only

sanity_checks::sanitize_mounted_files

breeze::prepare_command_files

breeze::run_build_command
85 changes: 47 additions & 38 deletions dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
@@ -65,36 +65,36 @@
)

NECESSARY_HOST_VOLUMES = [
"/.bash_aliases:/root/.bash_aliases:cached",
"/.bash_history:/root/.bash_history:cached",
"/.coveragerc:/opt/airflow/.coveragerc:cached",
"/.dockerignore:/opt/airflow/.dockerignore:cached",
"/.flake8:/opt/airflow/.flake8:cached",
"/.github:/opt/airflow/.github:cached",
"/.inputrc:/root/.inputrc:cached",
"/.rat-excludes:/opt/airflow/.rat-excludes:cached",
"/RELEASE_NOTES.rst:/opt/airflow/RELEASE_NOTES.rst:cached",
"/LICENSE:/opt/airflow/LICENSE:cached",
"/MANIFEST.in:/opt/airflow/MANIFEST.in:cached",
"/NOTICE:/opt/airflow/NOTICE:cached",
"/airflow:/opt/airflow/airflow:cached",
"/provider_packages:/opt/airflow/provider_packages:cached",
"/dags:/opt/airflow/dags:cached",
"/dev:/opt/airflow/dev:cached",
"/docs:/opt/airflow/docs:cached",
"/hooks:/opt/airflow/hooks:cached",
"/logs:/root/airflow/logs:cached",
"/pyproject.toml:/opt/airflow/pyproject.toml:cached",
"/pytest.ini:/opt/airflow/pytest.ini:cached",
"/scripts:/opt/airflow/scripts:cached",
"/scripts/docker/entrypoint_ci.sh:/entrypoint:cached",
"/setup.cfg:/opt/airflow/setup.cfg:cached",
"/setup.py:/opt/airflow/setup.py:cached",
"/tests:/opt/airflow/tests:cached",
"/kubernetes_tests:/opt/airflow/kubernetes_tests:cached",
"/docker_tests:/opt/airflow/docker_tests:cached",
"/chart:/opt/airflow/chart:cached",
"/metastore_browser:/opt/airflow/metastore_browser:cached",
(".bash_aliases", "/root/.bash_aliases"),
(".bash_history", "/root/.bash_history"),
(".coveragerc", "/opt/airflow/.coveragerc"),
(".dockerignore", "/opt/airflow/.dockerignore"),
(".flake8", "/opt/airflow/.flake8"),
(".github", "/opt/airflow/.github"),
(".inputrc", "/root/.inputrc"),
(".rat-excludes", "/opt/airflow/.rat-excludes"),
("RELEASE_NOTES.rst", "/opt/airflow/RELEASE_NOTES.rst"),
("LICENSE", "/opt/airflow/LICENSE"),
("MANIFEST.in", "/opt/airflow/MANIFEST.in"),
("NOTICE", "/opt/airflow/NOTICE"),
("airflow", "/opt/airflow/airflow"),
("provider_packages", "/opt/airflow/provider_packages"),
("dags", "/opt/airflow/dags"),
("dev", "/opt/airflow/dev"),
("docs", "/opt/airflow/docs"),
("hooks", "/opt/airflow/hooks"),
("logs", "/root/airflow/logs"),
("pyproject.toml", "/opt/airflow/pyproject.toml"),
("pytest.ini", "/opt/airflow/pytest.ini"),
("scripts", "/opt/airflow/scripts"),
("scripts/docker/entrypoint_ci.sh", "/entrypoint"),
("setup.cfg", "/opt/airflow/setup.cfg"),
("setup.py", "/opt/airflow/setup.py"),
("tests", "/opt/airflow/tests"),
("kubernetes_tests", "/opt/airflow/kubernetes_tests"),
("docker_tests", "/opt/airflow/docker_tests"),
("chart", "/opt/airflow/chart"),
("metastore_browser", "/opt/airflow/metastore_browser"),
]


@@ -117,17 +117,26 @@ def get_extra_docker_flags(mount_sources: str) -> List[str]:
"""
extra_docker_flags = []
if mount_sources == MOUNT_ALL:
extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}:/opt/airflow/:cached"])
extra_docker_flags.extend(["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT},dst=/opt/airflow/"])
elif mount_sources == MOUNT_SELECTED:
for flag in NECESSARY_HOST_VOLUMES:
extra_docker_flags.extend(["-v", str(AIRFLOW_SOURCES_ROOT) + flag])
extra_docker_flags.extend(['-v', "docker-compose_mypy-cache-volume:/opt/airflow/.mypy_cache/"])
for (src, dst) in NECESSARY_HOST_VOLUMES:
if (AIRFLOW_SOURCES_ROOT / src).exists():
extra_docker_flags.extend(
["--mount", f'type=bind,src={AIRFLOW_SOURCES_ROOT / src},dst={dst}']
)
extra_docker_flags.extend(
['--mount', "type=volume,src=docker-compose_mypy-cache-volume,dst=/opt/airflow/.mypy_cache"]
)
else: # none
extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT / 'empty'}:/opt/airflow/airflow"])
extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}/files:/files"])
extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}/dist:/dist"])
extra_docker_flags.extend(
["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT / 'empty'},dst=/opt/airflow/airflow"]
)
extra_docker_flags.extend(["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT / 'files'},dst=/files"])
extra_docker_flags.extend(["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT / 'dist'},dst=/dist"])
extra_docker_flags.extend(["--rm"])
extra_docker_flags.extend(["--env-file", f"{AIRFLOW_SOURCES_ROOT}/scripts/ci/docker-compose/_docker.env"])
extra_docker_flags.extend(
["--env-file", f"{AIRFLOW_SOURCES_ROOT / 'scripts' / 'ci' / 'docker-compose' / '_docker.env' }"]
)
return extra_docker_flags


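The interesting part of the MOUNT_SELECTED branch above is the new existence check: a bind mount whose source is missing would make docker run abort, so optional files and directories are now simply skipped. A rough, self-contained sketch of that pattern (the root path and the two entries are placeholders, not the real Breeze constants):

from pathlib import Path
from typing import List, Tuple

SOURCES_ROOT = Path("/home/user/airflow").resolve()  # stand-in for AIRFLOW_SOURCES_ROOT
HOST_VOLUMES: List[Tuple[str, str]] = [              # stand-in for NECESSARY_HOST_VOLUMES
    ("airflow", "/opt/airflow/airflow"),
    (".bash_history", "/root/.bash_history"),
]

def selected_mount_flags() -> List[str]:
    """Emit --mount flags only for host paths that actually exist."""
    flags: List[str] = []
    for src, dst in HOST_VOLUMES:
        host_path = SOURCES_ROOT / src
        if host_path.exists():  # skip missing optional files instead of failing at docker run
            flags.extend(["--mount", f"type=bind,src={host_path},dst={dst}"])
    return flags

print(selected_mount_flags())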
4 changes: 3 additions & 1 deletion dev/breeze/src/airflow_breeze/utils/path_utils.py
@@ -237,10 +237,11 @@ def find_airflow_sources_root_to_operate_on() -> Path:
return airflow_sources


AIRFLOW_SOURCES_ROOT = find_airflow_sources_root_to_operate_on()
AIRFLOW_SOURCES_ROOT = find_airflow_sources_root_to_operate_on().resolve()
BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / '.build'
FILES_DIR = AIRFLOW_SOURCES_ROOT / 'files'
MSSQL_DATA_VOLUME = AIRFLOW_SOURCES_ROOT / 'tmp_mssql_volume'
KUBE_DIR = AIRFLOW_SOURCES_ROOT / ".kube"
LOGS_DIR = AIRFLOW_SOURCES_ROOT / 'logs'
DIST_DIR = AIRFLOW_SOURCES_ROOT / 'dist'
SCRIPTS_CI_DIR = AIRFLOW_SOURCES_ROOT / 'scripts' / 'ci'
@@ -258,6 +259,7 @@ def create_directories() -> None:
BUILD_CACHE_DIR.mkdir(parents=True, exist_ok=True)
FILES_DIR.mkdir(parents=True, exist_ok=True)
MSSQL_DATA_VOLUME.mkdir(parents=True, exist_ok=True)
KUBE_DIR.mkdir(parents=True, exist_ok=True)
LOGS_DIR.mkdir(parents=True, exist_ok=True)
DIST_DIR.mkdir(parents=True, exist_ok=True)
OUTPUT_LOG.mkdir(parents=True, exist_ok=True)
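Calling .resolve() on the sources root is presumably there so that every src= value embedded in a type=bind mount is an absolute host path with symlinks and relative components expanded. A tiny illustration (the relative path is invented):

from pathlib import Path

root = Path("airflow")      # relative path, possibly reached through a symlink
print(root)                 # airflow
print(root.resolve())       # e.g. /home/user/airflow: absolute, symlinks expanded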
2 changes: 1 addition & 1 deletion dev/breeze/tests/test_commands.py
@@ -32,7 +32,7 @@ def test_get_extra_docker_flags_all():
def test_get_extra_docker_flags_selected():
flags = get_extra_docker_flags(MOUNT_SELECTED)
assert "empty" not in "".join(flags)
assert len(flags) > 60
assert len(flags) > 40


def test_get_extra_docker_flags_none():
5 changes: 5 additions & 0 deletions scripts/ci/libraries/_initialization.sh
@@ -43,6 +43,10 @@ function initialization::create_directories() {
export BUILD_CACHE_DIR="${AIRFLOW_SOURCES}/.build"
readonly BUILD_CACHE_DIR

# Directory where Kind credential information is kept
export KUBE_CACHE_DIR="${AIRFLOW_SOURCES}/.kube"
readonly KUBE_CACHE_DIR

# In case of tmpfs backend for docker, mssql fails because TMPFS does not support
# O_DIRECT parameter for direct writing to the filesystem
# https://github.com/microsoft/mssql-docker/issues/13
@@ -55,6 +59,7 @@ function initialization::create_directories() {
mkdir -p "${BUILD_CACHE_DIR}" >/dev/null
mkdir -p "${FILES_DIR}" >/dev/null
mkdir -p "${MSSQL_DATA_VOLUME}" >/dev/null
mkdir -p "${KUBE_CACHE_DIR}" >/dev/null
# MSSQL 2019 runs with non-root user by default so we have to make the volumes world-writeable
# This is a bit scary and we could get by making it group-writeable but the group would have
# to be set to "root" (GID=0) for the volume to work and this cannot be accomplished without sudo
14 changes: 0 additions & 14 deletions scripts/ci/libraries/_sanity_checks.sh
@@ -26,19 +26,6 @@ function sanity_checks::sanitize_file() {
touch "${1}"
}

# Those files are mounted into container when run locally
# .bash_history is preserved and you can modify .bash_aliases and .inputrc
# according to your liking
function sanity_checks::sanitize_mounted_files() {
sanity_checks::sanitize_file "${AIRFLOW_SOURCES}/.bash_history"
sanity_checks::sanitize_file "${AIRFLOW_SOURCES}/.bash_aliases"
sanity_checks::sanitize_file "${AIRFLOW_SOURCES}/.inputrc"

# When KinD cluster is created, the folder keeps authentication information
# across sessions
mkdir -p "${AIRFLOW_SOURCES}/.kube" >/dev/null 2>&1
}

#
# Creates cache directory where we will keep temporary files needed for the docker build
#
@@ -150,5 +137,4 @@ function sanity_checks::basic_sanity_checks() {
initialization::set_default_python_version_if_empty
sanity_checks::go_to_airflow_sources
sanity_checks::check_if_coreutils_installed
sanity_checks::sanitize_mounted_files
}
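With the exists() guard in docker_command_utils.py, the shell-side sanitize_mounted_files step becomes unnecessary: .bash_history, .bash_aliases and .inputrc are only mounted when they are already present, and the .kube directory is now pre-created by create_directories(). For anyone who still wants the old pre-creation behaviour locally, a rough Python equivalent of the removed function would be (the sources path is an illustrative stand-in for AIRFLOW_SOURCES):

from pathlib import Path

SOURCES_ROOT = Path.home() / "airflow"  # illustrative stand-in for AIRFLOW_SOURCES
SOURCES_ROOT.mkdir(parents=True, exist_ok=True)

# Roughly what sanity_checks::sanitize_mounted_files used to do: make sure the
# optional dot-files exist as files and keep the KinD credentials folder around.
for name in (".bash_history", ".bash_aliases", ".inputrc"):
    (SOURCES_ROOT / name).touch(exist_ok=True)
(SOURCES_ROOT / ".kube").mkdir(parents=True, exist_ok=True)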