@@ -280,7 +280,7 @@ def get_fastapi_app(self) -> FastAPI | None:
"""
from airflow.api_fastapi.auth.managers.simple.routes.login import login_router

dev_mode = os.environ.get("DEV_MODE", False) == "true"
dev_mode = os.environ.get("DEV_MODE", str(False)) == "true"
directory = Path(__file__).parent.joinpath("ui", "dev" if dev_mode else "dist")
directory.mkdir(exist_ok=True)

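Why PLW1508 asks for a string default here: os.environ.get returns its default object unchanged when the variable is unset, so a bool default leaks into what is otherwise a string comparison. A minimal standalone sketch (the variable name is made up and assumed unset):

import os

VAR = "_PLW1508_DEMO_DEV_MODE"  # hypothetical, assumed unset

# With a bool default, the comparison mixes types and is False only by accident:
buggy = os.environ.get(VAR, False) == "true"       # False == "true" -> False
# With a str default, everything stays a string, which is what the rule enforces:
fixed = os.environ.get(VAR, str(False)) == "true"  # "False" == "true" -> False

print(buggy, fixed)  # both False here, but only the second is type-consistent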
airflow-core/src/airflow/api_fastapi/core_api/app.py (1 addition, 1 deletion)
@@ -47,7 +47,7 @@ def init_views(app: FastAPI) -> None:
     app.include_router(ui_router)
     app.include_router(public_router)

-    dev_mode = os.environ.get("DEV_MODE", False) == "true"
+    dev_mode = os.environ.get("DEV_MODE", str(False)) == "true"

     directory = Path(AIRFLOW_PATH) / ("airflow/ui/dev" if dev_mode else "airflow/ui/dist")

@@ -292,7 +292,7 @@ def generate_packages_metadata(self):
"Quantity": 1,
"Items": ["/*"],
},
"CallerReference": str(int(os.environ.get("GITHUB_RUN_ID", 0))),
"CallerReference": str(int(os.environ.get("GITHUB_RUN_ID", str(0)))),
},
)
get_console().print(
docker-tests/tests/docker_tests/test_prod_image.py (1 addition, 1 deletion)
@@ -54,7 +54,7 @@
     if not provider_id.startswith("#")
 ]

-testing_slim_image = os.environ.get("TEST_SLIM_IMAGE", False)
+testing_slim_image = os.environ.get("TEST_SLIM_IMAGE", str(False)).lower() in ("true", "1", "yes")


 class TestCommands:
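Unlike the purely mechanical str(...) fixes elsewhere in this diff, the test_prod_image.py change also fixes semantics: the old code kept whatever string was in TEST_SLIM_IMAGE (or the bool False), so any non-empty value, even "false", was truthy wherever the flag was tested. A sketch of the new pattern as a reusable helper (the helper name is mine, not from this PR):

import os

def env_flag(name: str, default: bool = False) -> bool:
    # Env values are always strings, so parse explicitly instead of
    # relying on truthiness, where "false" would count as True.
    return os.environ.get(name, str(default)).lower() in ("true", "1", "yes")

os.environ["_DEMO_FLAG"] = "false"
print(bool(os.environ["_DEMO_FLAG"]))  # True  (non-empty string)
print(env_flag("_DEMO_FLAG"))          # False (parsed explicitly)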
providers/amazon/tests/system/amazon/aws/example_bedrock.py (2 additions, 2 deletions)
@@ -65,12 +65,12 @@
 # Creating a custom model takes nearly two hours. If SKIP_LONG_TASKS
 # is True then these tasks will be skipped. This way we can still have
 # the code snippets for docs, and we can manually run the full tests.
-SKIP_LONG_TASKS = environ.get("SKIP_LONG_SYSTEM_TEST_TASKS", default=True)
+SKIP_LONG_TASKS = environ.get("SKIP_LONG_SYSTEM_TEST_TASKS", str(True))

 # No-commitment Provisioned Throughput is currently restricted to external
 # customers only and will fail with a ServiceQuotaExceededException if run
 # on the AWS System Test stack.
-SKIP_PROVISION_THROUGHPUT = environ.get("SKIP_RESTRICTED_SYSTEM_TEST_TASKS", default=True)
+SKIP_PROVISION_THROUGHPUT = environ.get("SKIP_RESTRICTED_SYSTEM_TEST_TASKS", str(True))

 LLAMA_SHORT_MODEL_ID = "meta.llama3-8b-instruct-v1:0"
 TITAN_MODEL_ID = "amazon.titan-text-express-v1:0:8k"
@@ -17,7 +17,6 @@

 from __future__ import annotations

-import os
 from typing import TYPE_CHECKING, Any
 from urllib.parse import unquote

@@ -48,10 +47,6 @@
     from airflow.utils.context import Context


-REPAIR_WAIT_ATTEMPTS = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)
-REPAIR_WAIT_DELAY = os.getenv("DATABRICKS_REPAIR_WAIT_DELAY", 0.5)
-
-
 def get_auth_decorator():
     if AIRFLOW_V_3_0_PLUS:
         from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity
@@ -30,7 +30,7 @@
 from airflow.providers.databricks.operators.databricks_workflow import DatabricksWorkflowTaskGroup
 from airflow.utils.timezone import datetime

-EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", str(6)))

 DATABRICKS_CONN_ID = os.getenv("DATABRICKS_CONN_ID", "databricks_default")
 DATABRICKS_NOTIFICATION_EMAIL = os.getenv("DATABRICKS_NOTIFICATION_EMAIL", "your_email@serviceprovider.com")
providers/edge3/src/airflow/providers/edge3/cli/api_client.py (9 additions, 3 deletions)
@@ -53,12 +53,18 @@
 # Note: Given defaults make attempts after 1, 3, 7, 15, 31seconds, 1:03, 2:07, 3:37 and fails after 5:07min
 # So far there is no other config facility in Task SDK we use ENV for the moment
 # TODO: Consider these env variables jointly in task sdk together with task_sdk/src/airflow/sdk/api/client.py
-API_RETRIES = int(os.getenv("AIRFLOW__EDGE__API_RETRIES", os.getenv("AIRFLOW__WORKERS__API_RETRIES", 10)))
+API_RETRIES = int(
+    os.getenv("AIRFLOW__EDGE__API_RETRIES", os.getenv("AIRFLOW__WORKERS__API_RETRIES", str(10)))
+)
 API_RETRY_WAIT_MIN = float(
-    os.getenv("AIRFLOW__EDGE__API_RETRY_WAIT_MIN", os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MIN", 1.0))
+    os.getenv(
+        "AIRFLOW__EDGE__API_RETRY_WAIT_MIN", os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MIN", str(1.0))
+    )
 )
 API_RETRY_WAIT_MAX = float(
-    os.getenv("AIRFLOW__EDGE__API_RETRY_WAIT_MAX", os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MAX", 90.0))
+    os.getenv(
+        "AIRFLOW__EDGE__API_RETRY_WAIT_MAX", os.getenv("AIRFLOW__WORKERS__API_RETRY_WAIT_MAX", str(90.0))
+    )
 )

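The timings in the note above are consistent with waits that double from API_RETRY_WAIT_MIN and cap at API_RETRY_WAIT_MAX; a quick standalone check of that arithmetic (the backoff shape is inferred from the comment, since the retry code itself is not in this hunk):

# 10 attempts means 9 waits between them.
retries, wait_min, wait_max = 10, 1.0, 90.0

waits = [min(wait_min * 2**i, wait_max) for i in range(retries - 1)]
elapsed = [sum(waits[: i + 1]) for i in range(len(waits))]

print(waits)    # [1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 90.0, 90.0]
print(elapsed)  # [1.0, 3.0, 7.0, 15.0, 31.0, 63.0, 127.0, 217.0, 307.0]
# 63 s = 1:03, 127 s = 2:07, 217 s = 3:37, 307 s = 5:07, matching the comment.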
@@ -79,7 +79,7 @@
 EXAMPLE_FILE = "storage-transfer/big_file.dat"
 BUCKET_SOURCE_AWS = f"bucket-aws-{DAG_ID}-{ENV_ID}".replace("_", "-")
 BUCKET_TARGET_GCS = f"bucket-gcs-{DAG_ID}-{ENV_ID}".replace("_", "-")
-WAIT_FOR_OPERATION_POKE_INTERVAL = int(os.environ.get("WAIT_FOR_OPERATION_POKE_INTERVAL", 5))
+WAIT_FOR_OPERATION_POKE_INTERVAL = int(os.environ.get("WAIT_FOR_OPERATION_POKE_INTERVAL", str(5)))

 GCP_DESCRIPTION = "description"
 GCP_TRANSFER_JOB_NAME = f"transferJobs/sampleJob-{DAG_ID}-{ENV_ID}".replace("-", "_")
@@ -40,7 +40,7 @@
 except ImportError:
     pytest.skip("Azure Service Bus not available", allow_module_level=True)

-EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", str(6)))

 CLIENT_ID = os.getenv("CLIENT_ID", "")
 QUEUE_NAME = "sb_mgmt_queue_test"
@@ -23,12 +23,12 @@
 from airflow.providers.microsoft.azure.operators.synapse import AzureSynapseRunSparkBatchOperator

 AIRFLOW_HOME = os.getenv("AIRFLOW_HOME", "/usr/local/airflow")
-EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", str(6)))

 default_args = {
     "execution_timeout": timedelta(hours=EXECUTION_TIMEOUT),
-    "retries": int(os.getenv("DEFAULT_TASK_RETRIES", 2)),
-    "retry_delay": timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", 60))),
+    "retries": int(os.getenv("DEFAULT_TASK_RETRIES", str(2))),
+    "retry_delay": timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", str(60)))),
 }

 SPARK_JOB_PAYLOAD = {
pyproject.toml (6 additions, 0 deletions)
@@ -595,6 +595,12 @@ extend-select = [
"PLW0245", # super call is missing parentheses
"PLW0406", # Module {name} imports itself
"PLW0602", # Using global for {name} but no assignment is done
"PLW0604", # global at module level is redundant
"PLW0642", # Reassigned {} variable in {method_type} method
"PLW0711", # Exception to catch is the result of a binary and operation
"PLW1501", # {mode} is not a valid mode for open
"PLW1507", # Shallow copy of os.environ via copy.copy(os.environ)
"PLW1508", # Invalid type for environment variable default; expected str or None
# Per rule enables
"RUF006", # Checks for asyncio dangling task
"RUF015", # Checks for unnecessary iterable allocation for first element
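For reference, a minimal file that the newly enabled PLW1508 would flag, checkable with ruff check --select PLW1508 (the variable names are illustrative):

import os

num_runs = int(os.environ.get("NUM_RUNS", 10))     # PLW1508: int default
dev_mode = os.getenv("DEV_MODE", False) == "true"  # PLW1508: bool default

num_runs_ok = int(os.environ.get("NUM_RUNS", str(10)))  # ok: str default
token = os.getenv("API_TOKEN")                          # ok: None default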
scripts/in_container/update_quarantined_test_status.py (2 additions, 2 deletions)
@@ -213,8 +213,8 @@ def get_table(history_map: dict[str, TestHistory]) -> str:
     raise RuntimeError("GitHub Repository must be defined!")
 user, repo = github_repository.split("/")
 print(f"User: {user}, Repo: {repo}")
-issue_id = int(os.environ.get("ISSUE_ID", 0))
-num_runs = int(os.environ.get("NUM_RUNS", 10))
+issue_id = int(os.environ.get("ISSUE_ID", str(0)))
+num_runs = int(os.environ.get("NUM_RUNS", str(10)))

 if issue_id == 0:
     raise RuntimeError("You need to define ISSUE_ID as environment variable")
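An alternative to wrapping every default in str(), sketched here for comparison (the helper is hypothetical, not part of this PR): keep the default in its natural type and convert only when the variable is actually set. os.environ.get's implicit default then stays None, which PLW1508 accepts.

import os

def env_int(name: str, default: int) -> int:
    raw = os.environ.get(name)  # implicit default is None -> nothing to flag
    return default if raw is None else int(raw)

issue_id = env_int("ISSUE_ID", 0)
num_runs = env_int("NUM_RUNS", 10)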