diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py
index 6eda1d29667a3..a7c973bf1e761 100644
--- a/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -27,7 +27,7 @@
 from __future__ import annotations
 
 import itertools
-from random import uniform
+import random
 from time import sleep
 from typing import TYPE_CHECKING, Callable
@@ -529,7 +529,7 @@ def add_jitter(delay: int | float, width: int | float = 1, minima: int | float =
         minima = abs(minima)
         lower = max(minima, delay - width)
         upper = delay + width
-        return uniform(lower, upper)
+        return random.uniform(lower, upper)
 
     @staticmethod
     def delay(delay: int | float | None = None) -> None:
@@ -546,7 +546,7 @@ def delay(delay: int | float | None = None) -> None:
         when many concurrent tasks request job-descriptions.
         """
         if delay is None:
-            delay = uniform(BatchClientHook.DEFAULT_DELAY_MIN, BatchClientHook.DEFAULT_DELAY_MAX)
+            delay = random.uniform(BatchClientHook.DEFAULT_DELAY_MIN, BatchClientHook.DEFAULT_DELAY_MAX)
         else:
             delay = BatchClientHook.add_jitter(delay)
         sleep(delay)
@@ -594,4 +594,4 @@ def exp(tries):
         max_interval = 600.0  # results in 3 to 10 minute delay
         delay = 1 + pow(tries * 0.6, 2)
         delay = min(max_interval, delay)
-        return uniform(delay / 3, delay)
+        return random.uniform(delay / 3, delay)
diff --git a/airflow/utils/strings.py b/airflow/utils/strings.py
index 421c47fce5ff3..e2ad9ddfcf641 100644
--- a/airflow/utils/strings.py
+++ b/airflow/utils/strings.py
@@ -17,13 +17,13 @@
 """Common utility functions with strings."""
 from __future__ import annotations
 
+import random
 import string
-from random import choice
 
 
 def get_random_string(length=8, choices=string.ascii_letters + string.digits):
     """Generate random string."""
-    return "".join(choice(choices) for _ in range(length))
+    return "".join(random.choices(choices, k=length))
 
 
 TRUE_LIKE_VALUES = {"on", "t", "true", "y", "yes", "1"}
diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
index 5216b08f6dfc0..82527e6710a6a 100644
--- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
@@ -17,11 +17,11 @@
 """Various utils to prepare docker and docker compose commands."""
 from __future__ import annotations
 
+import copy
 import os
+import random
 import re
 import sys
-from copy import deepcopy
-from random import randint
 from subprocess import DEVNULL, CalledProcessError, CompletedProcess
 from typing import TYPE_CHECKING
 
@@ -372,7 +372,7 @@ def get_env_variable_value(arg_name: str, params: CommonBuildParams | ShellParam
     value = "true" if raw_value is True else value
     value = "false" if raw_value is False else value
     if arg_name == "upgrade_to_newer_dependencies" and value == "true":
-        value = f"{randint(0, 2**32):x}"
+        value = f"{random.randrange(2**32):x}"
     return value
 
 
@@ -509,7 +509,7 @@ def construct_docker_push_command(
 def build_cache(image_params: CommonBuildParams, output: Output | None) -> RunCommandResult:
     build_command_result: CompletedProcess | CalledProcessError = CompletedProcess(args=[], returncode=0)
     for platform in image_params.platforms:
-        platform_image_params = deepcopy(image_params)
+        platform_image_params = copy.deepcopy(image_params)
         # override the platform in the copied params to only be single platform per run
         # as a workaround to https://github.com/docker/buildx/issues/1044
         platform_image_params.platform = platform
@@ -721,7 +721,7 @@ def warm_up_docker_builder(image_params: CommonBuildParams):
         return
     docker_syntax = get_docker_syntax_version()
     get_console().print(f"[info]Warming up the {docker_context} builder for syntax: {docker_syntax}")
-    warm_up_image_param = deepcopy(image_params)
+    warm_up_image_param = copy.deepcopy(image_params)
     warm_up_image_param.image_tag = "warmup"
     warm_up_image_param.push = False
     build_command = prepare_base_build_command(image_params=warm_up_image_param)
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index fb38e8f2dc06b..5d8d209cc504a 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -26,6 +26,7 @@
 import json
 import logging
 import os
+import random
 import re
 import shutil
 import subprocess
@@ -39,7 +40,6 @@
 from functools import lru_cache
 from os.path import dirname, relpath
 from pathlib import Path
-from random import choice
 from shutil import copyfile
 from typing import Any, Generator, Iterable, NamedTuple
 
@@ -1214,7 +1214,7 @@ def get_type_of_changes(answer: str | None) -> TypeOfChange:
         given_answer = ""
         if answer and answer.lower() in ["yes", "y"]:
             # Simulate all possible non-terminal answers
-            return choice(
+            return random.choice(
                 [
                     TypeOfChange.DOCUMENTATION,
                     TypeOfChange.BUGFIX,
diff --git a/tests/models/test_cleartasks.py b/tests/models/test_cleartasks.py
index a832d3ab854cf..01fbb85e403ba 100644
--- a/tests/models/test_cleartasks.py
+++ b/tests/models/test_cleartasks.py
@@ -18,6 +18,7 @@
 from __future__ import annotations
 
 import datetime
+import random
 
 import pytest
 
@@ -580,25 +581,23 @@ def test_dags_clear(self):
             assert tis[i].max_tries == 1
 
         # test only_failed
-        from random import randint
-
-        failed_dag_idx = randint(0, len(tis) - 1)
-        tis[failed_dag_idx].state = State.FAILED
-        session.merge(tis[failed_dag_idx])
+        failed_dag = random.choice(tis)
+        failed_dag.state = State.FAILED
+        session.merge(failed_dag)
         session.commit()
 
         DAG.clear_dags(dags, only_failed=True)
 
-        for i in range(num_of_dags):
-            tis[i].refresh_from_db()
-            if i != failed_dag_idx:
-                assert tis[i].state == State.SUCCESS
-                assert tis[i].try_number == 3
-                assert tis[i].max_tries == 1
+        for ti in tis:
+            ti.refresh_from_db()
+            if ti is failed_dag:
+                assert ti.state == State.NONE
+                assert ti.try_number == 3
+                assert ti.max_tries == 2
             else:
-                assert tis[i].state == State.NONE
-                assert tis[i].try_number == 3
-                assert tis[i].max_tries == 2
+                assert ti.state == State.SUCCESS
+                assert ti.try_number == 3
+                assert ti.max_tries == 1
 
     def test_operator_clear(self, dag_maker):
         with dag_maker(
diff --git a/tests/providers/amazon/aws/hooks/test_batch_client.py b/tests/providers/amazon/aws/hooks/test_batch_client.py
index ea04cb2080cc5..e501da3c067d7 100644
--- a/tests/providers/amazon/aws/hooks/test_batch_client.py
+++ b/tests/providers/amazon/aws/hooks/test_batch_client.py
@@ -426,7 +426,7 @@ def test_add_jitter(self):
         assert result >= minima
         assert result <= width
 
-    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
+    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.random.uniform")
     @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
     def test_delay_defaults(self, mock_sleep, mock_uniform):
         assert BatchClientHook.DEFAULT_DELAY_MIN == 1
@@ -438,21 +438,21 @@ def test_delay_defaults(self, mock_sleep, mock_uniform):
         )
         mock_sleep.assert_called_once_with(0)
 
-    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
+    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.random.uniform")
     @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
     def test_delay_with_zero(self, mock_sleep, mock_uniform):
         self.batch_client.delay(0)
         mock_uniform.assert_called_once_with(0, 1)  # in add_jitter
         mock_sleep.assert_called_once_with(mock_uniform.return_value)
 
-    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
+    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.random.uniform")
     @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
     def test_delay_with_int(self, mock_sleep, mock_uniform):
         self.batch_client.delay(5)
         mock_uniform.assert_called_once_with(4, 6)  # in add_jitter
         mock_sleep.assert_called_once_with(mock_uniform.return_value)
 
-    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.uniform")
+    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.random.uniform")
     @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.sleep")
     def test_delay_with_float(self, mock_sleep, mock_uniform):
         self.batch_client.delay(5.0)
diff --git a/tests/providers/amazon/aws/utils/test_identifiers.py b/tests/providers/amazon/aws/utils/test_identifiers.py
index e0334a34727d9..40c1aba2420d1 100644
--- a/tests/providers/amazon/aws/utils/test_identifiers.py
+++ b/tests/providers/amazon/aws/utils/test_identifiers.py
@@ -41,10 +41,7 @@ def setup_namespace(self, request):
 
     def test_deterministic(self):
         """Test that result is deterministic and a valid UUID object"""
-        args = [
-            "".join(random.choice(string.ascii_letters) for _ in range(random.randint(3, 13)))
-            for _ in range(100)
-        ]
+        args = ["".join(random.choices(string.ascii_letters, k=random.randint(3, 13))) for _ in range(100)]
         result = generate_uuid(*args, **self.kwargs)
         assert result == generate_uuid(*args, **self.kwargs)
         assert uuid.UUID(result).version == 5, "Should generate UUID v5"
diff --git a/tests/providers/microsoft/conftest.py b/tests/providers/microsoft/conftest.py
index aa75c95203c68..bcf5aa65fe6eb 100644
--- a/tests/providers/microsoft/conftest.py
+++ b/tests/providers/microsoft/conftest.py
@@ -17,8 +17,8 @@
 
 from __future__ import annotations
 
+import random
 import string
-from random import choices
 from typing import TypeVar
 
 import pytest
@@ -33,7 +33,9 @@ def create_mock_connection(monkeypatch):
     """Helper fixture for create test connection."""
 
     def wrapper(conn: T, conn_id: str | None = None):
-        conn_id = conn_id or "test_conn_" + "".join(choices(string.ascii_lowercase + string.digits, k=6))
+        conn_id = conn_id or "test_conn_" + "".join(
+            random.choices(string.ascii_lowercase + string.digits, k=6)
+        )
         if isinstance(conn, dict):
             conn = Connection.from_json(conn)
         elif isinstance(conn, str):
diff --git a/tests/providers/oracle/operators/test_oracle.py b/tests/providers/oracle/operators/test_oracle.py
index 0c382824ba6db..f4311362b0cf1 100644
--- a/tests/providers/oracle/operators/test_oracle.py
+++ b/tests/providers/oracle/operators/test_oracle.py
@@ -16,8 +16,8 @@
 # under the License.
 from __future__ import annotations
 
+import random
 import re
-from random import randrange
 from unittest import mock
 
 import oracledb
@@ -90,7 +90,7 @@ def test_push_oracle_exit_to_xcom(self, mock_callproc, request, dag_maker):
         oracle_conn_id = "oracle_default"
         parameters = {"parameter": "value"}
         task_id = "test_push"
-        ora_exit_code = f"{randrange(10**5):05}"
+        ora_exit_code = f"{random.randrange(10**5):05}"
         error = f"ORA-{ora_exit_code}: This is a five-digit ORA error code"
         mock_callproc.side_effect = oracledb.DatabaseError(error)
 
diff --git a/tests/providers/ssh/operators/test_ssh.py b/tests/providers/ssh/operators/test_ssh.py
index 866cf5b96b881..401dc38dd571e 100644
--- a/tests/providers/ssh/operators/test_ssh.py
+++ b/tests/providers/ssh/operators/test_ssh.py
@@ -17,7 +17,7 @@
 # under the License.
 from __future__ import annotations
 
-from random import randrange
+import random
 from unittest import mock
 
 import pytest
@@ -217,7 +217,7 @@ def test_command_errored(self):
     def test_push_ssh_exit_to_xcom(self, request, dag_maker):
         # Test pulls the value previously pushed to xcom and checks if it's the same
         command = "not_a_real_command"
-        ssh_exit_code = randrange(1, 100)
+        ssh_exit_code = random.randrange(1, 100)
         self.exec_ssh_client_command.return_value = (ssh_exit_code, b"", b"ssh output")
 
         with dag_maker(dag_id=f"dag_{request.node.name}"):
diff --git a/tests/system/providers/google/cloud/dataplex/resources/spark_example_pi.py b/tests/system/providers/google/cloud/dataplex/resources/spark_example_pi.py
index 77a7cb9e6ca41..be477d202adc7 100644
--- a/tests/system/providers/google/cloud/dataplex/resources/spark_example_pi.py
+++ b/tests/system/providers/google/cloud/dataplex/resources/spark_example_pi.py
@@ -17,9 +17,9 @@
 
 from __future__ import annotations
 
+import random
 import sys
 from operator import add
-from random import random
 
 from pyspark.sql import SparkSession
 
@@ -33,8 +33,8 @@
     n = 100000 * partitions
 
     def f(_: int) -> float:
-        x = random() * 2 - 1
-        y = random() * 2 - 1
+        x = random.random() * 2 - 1
+        y = random.random() * 2 - 1
         return 1 if x**2 + y**2 <= 1 else 0
 
     count = spark.sparkContext.parallelize(range(1, n + 1), partitions).map(f).reduce(add)
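
A note on the pattern behind these changes: switching from `from random import uniform` to `import random` makes the call site resolve `random.uniform` on the module at call time, which is why the test updates above retarget `mock.patch` at `airflow.providers.amazon.aws.hooks.batch_client.random.uniform` instead of `...batch_client.uniform`. The following is a minimal standalone sketch of that late-binding behavior; it is demo code, not part of the patch:

    import random
    from random import uniform  # early binding: this name keeps the original function
    from unittest import mock

    with mock.patch.object(random, "uniform", return_value=42.0):
        # Late binding: `random.uniform` is looked up on the module at each
        # call, so the patched attribute is what actually runs.
        assert random.uniform(0, 1) == 42.0
        # Early binding: a name imported via `from random import uniform` was
        # bound before the patch and still calls the real function, which can
        # only return a value in [0, 1] here.
        assert uniform(0, 1) != 42.0

The same reasoning applies to the `copy.deepcopy` and `random.choice`/`random.choices` call sites above. One behavioral detail worth noting: `random.randrange(2**32)` excludes 2**32, whereas the old `randint(0, 2**32)` included it, so the new hex-formatted value always fits in eight digits.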