From 161448f276cd6fbe697c83e2ec55c4828e9143fa Mon Sep 17 00:00:00 2001
From: David Cavazos
Date: Wed, 17 May 2023 13:00:22 -0700
Subject: [PATCH] chore: update type hints to comply with PEP-0585 (#9974)

## Description

Updates the type hints to comply with [PEP 585](https://peps.python.org/pep-0585). The conversion script (added in this PR as `scripts/convert-types.py`) handles most cases, but a handful of files still required manual fixes to normalize them. A minimal before/after sketch of the conversion pattern is included below, after the App Engine storage hunks.

Note: Before submitting a pull request, please open an issue for discussion if you are not associated with Google.

## Checklist
- [ ] I have followed [Sample Guidelines from AUTHORING_GUIDE.MD](https://togithub.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md)
- [ ] README is updated to include [all relevant information](https://togithub.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md#readme-file)
- [ ] **Tests** pass: `nox -s py-3.9` (see [Test Environment Setup](https://togithub.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md#test-environment-setup))
- [ ] **Lint** pass: `nox -s lint` (see [Test Environment Setup](https://togithub.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md#test-environment-setup))
- [ ] These samples need a new **API enabled** in testing projects to pass (let us know which ones)
- [ ] These samples need a new/updated **env vars** in testing projects set to pass (let us know which ones)
- [ ] This sample adds a new sample directory, and I updated the [CODEOWNERS file](https://togithub.com/GoogleCloudPlatform/python-docs-samples/blob/main/.github/CODEOWNERS) with the codeowners for this sample
- [ ] This sample adds a new **Product API**, and I updated the [Blunderbuss issue/PR auto-assigner](https://togithub.com/GoogleCloudPlatform/python-docs-samples/blob/main/.github/blunderbuss.yml) with the codeowners for this sample
- [ ] Please **merge** this PR for me once it is approved
---
 appengine/flexible/storage/main.py | 2 + .../storage/main.py | 5 +- batch/list/list_jobs.py | 4 +- batch/list/list_tasks.py | 4 +- batch/logs/read_job_logs.py | 2 + batch/tests/test_basics.py | 4 +- bigquery/remote-function/translate/main.py | 7 +- .../cloud_sql_connection_pool_test.py | 11 +- cloud-sql/mysql/sqlalchemy/app.py | 5 +- .../cloud_sql_connection_pool_test.py | 10 +- cloud-sql/postgres/sqlalchemy/app.py | 5 +- .../cloud_sql_connection_pool_test.py | 9 +- cloud-sql/sql-server/sqlalchemy/app.py | 5 +- .../cicd_sample/utils/add_dags_to_composer.py | 5 +- .../composer2/composer2_airflow_rest_api.py | 2 + composer/tools/composer_dags.py | 16 +- .../disks/create_kms_encrypted_disk.py | 7 +- .../client_library/ingredients/disks/list.py | 4 +- .../disks/regional_create_from_source.py | 8 +- .../ingredients/firewall/list.py | 6 +- .../ingredients/images/create.py | 5 +- .../ingredients/images/create_from_image.py | 10 +- .../images/create_from_snapshot.py | 10 +- .../ingredients/images/list_images.py | 6 +- .../ingredients/instance-templates/list.py | 6 +- .../ingredients/instances/bulk_insert.py | 8 +- .../ingredients/instances/create_instance.py | 7 +- .../create_windows_instance.py | 7 +- .../create_with_existing_disks.py | 5 +- .../create_extra_mem_no_helper.py | 7 +- .../create_without_helper.py | 7 +- .../ingredients/instances/list.py | 6 +- .../ingredients/instances/list_all.py | 6 +- .../preemptible/preemption_history.py | 7 +- .../operations/handle_extended_operation.py | 2 + .../ingredients/routes/create.py | 13 +- .../client_library/ingredients/routes/list.py | 4 +-
.../ingredients/snapshots/create.py | 7 +- .../ingredients/snapshots/list.py | 4 +- compute/client_library/sgs.py | 9 +- .../snippets/disks/attach_disk.py | 2 + .../snippets/disks/autodelete_change.py | 2 + .../snippets/disks/clone_encrypted_disk.py | 2 + .../disks/clone_encrypted_disk_managed_key.py | 2 + .../snippets/disks/create_empty_disk.py | 2 + .../snippets/disks/create_from_image.py | 2 + .../snippets/disks/create_from_snapshot.py | 2 + .../snippets/disks/create_from_source.py | 2 + .../disks/create_kms_encrypted_disk.py | 8 +- .../client_library/snippets/disks/delete.py | 2 + compute/client_library/snippets/disks/list.py | 4 +- .../disks/regional_create_from_source.py | 9 +- .../snippets/disks/regional_delete.py | 2 + .../snippets/disks/resize_disk.py | 2 + .../snippets/firewall/create.py | 2 + .../snippets/firewall/delete.py | 2 + .../client_library/snippets/firewall/list.py | 4 +- .../client_library/snippets/firewall/main.py | 4 +- .../client_library/snippets/firewall/patch.py | 2 + .../snippets/firewall/windows_kms.py | 2 + .../client_library/snippets/images/create.py | 6 +- .../snippets/images/create_from_image.py | 11 +- .../snippets/images/create_from_snapshot.py | 11 +- .../client_library/snippets/images/delete.py | 2 + .../client_library/snippets/images/list.py | 4 +- .../snippets/images/set_deprecation_status.py | 2 + .../snippets/instance_templates/create.py | 2 + .../create_from_instance.py | 2 + .../instance_templates/create_with_subnet.py | 2 + .../snippets/instance_templates/delete.py | 2 + .../snippets/instance_templates/list.py | 4 +- .../snippets/instances/bulk_insert.py | 9 +- .../snippets/instances/change_machine_type.py | 2 + .../snippets/instances/create.py | 8 +- .../create_from_custom_image.py | 8 +- .../create_from_public_image.py | 8 +- .../create_from_snapshot.py | 8 +- .../create_windows_instance.py | 10 +- .../create_with_additional_disk.py | 8 +- .../create_with_existing_disks.py | 10 +- .../create_with_local_ssd.py | 8 +- .../create_with_snapshotted_data_disk.py | 8 +- .../snippets/instances/create_with_subnet.py | 8 +- .../instances/custom_hostname/create.py | 8 +- .../create_shared_with_helper.py | 8 +- .../create_with_helper.py | 8 +- .../create_without_helper.py | 10 +- .../extra_mem_no_helper.py | 10 +- .../custom_machine_types/update_memory.py | 2 + .../snippets/instances/delete.py | 2 + .../instances/delete_protection/create.py | 8 +- .../instances/delete_protection/set.py | 2 + .../create_from_template.py | 2 + .../create_from_template_with_overrides.py | 2 + .../client_library/snippets/instances/list.py | 4 +- .../snippets/instances/list_all.py | 6 +- .../preemptible/create_preemptible.py | 8 +- .../preemptible/preemption_history.py | 5 +- .../snippets/instances/reset.py | 2 + .../snippets/instances/resume.py | 2 + .../snippets/instances/spot/create.py | 8 +- .../snippets/instances/start.py | 2 + .../snippets/instances/start_encrypted.py | 2 + .../client_library/snippets/instances/stop.py | 2 + .../snippets/instances/suspend.py | 2 + .../operations/wait_for_extended_operation.py | 2 + .../client_library/snippets/routes/create.py | 14 +- .../snippets/routes/create_kms_route.py | 14 +- .../client_library/snippets/routes/delete.py | 2 + .../client_library/snippets/routes/list.py | 4 +- .../snippets/snapshots/create.py | 12 +- .../snippets/snapshots/delete.py | 2 + .../snippets/snapshots/delete_by_filter.py | 5 +- .../client_library/snippets/snapshots/list.py | 4 +- .../snippets/usage_report/usage_reports.py | 2 + 
compute/load_balancing/create_certificate.py | 5 +- .../create_regional_certificate.py | 7 +- .../oslogin/oslogin_service_account_ssh.py | 25 +- container/snippets/create_cluster.py | 7 +- container/snippets/delete_cluster.py | 7 +- contentwarehouse/snippets/set_acl_sample.py | 6 +- data-science-onramp/vertex-ai/sklearn_test.py | 7 +- data-science-onramp/vertex-ai/tfkeras_test.py | 7 +- dataflow/conftest.py | 55 ++- dataflow/custom-containers/miniconda/main.py | 5 +- dataflow/custom-containers/minimal/main.py | 5 +- dataflow/custom-containers/ubuntu/main.py | 5 +- .../streaming_beam/streaming_beam.py | 8 +- dataflow/gpu-examples/pytorch-minimal/main.py | 5 +- .../tensorflow-landsat-prime/main.py | 20 +- .../gpu-examples/tensorflow-landsat/main.py | 20 +- .../gpu-examples/tensorflow-minimal/main.py | 5 +- datastore/cloud-client/tasks.py | 7 +- discoveryengine/import_documents_sample.py | 9 +- discoveryengine/list_operations_sample.py | 5 +- dlp/snippets/deid.py | 114 +++---- dlp/snippets/jobs.py | 19 +- .../snippets/process_document_form_sample.py | 4 +- .../snippets/process_document_ocr_sample.py | 4 +- .../process_document_splitter_sample.py | 4 +- .../search/lookup_public_kg_sample.py | 4 +- .../search/lookup_sample.py | 4 +- .../search/search_public_kg_sample.py | 4 +- .../search/search_sample.py | 4 +- generative_ai/tuning.py | 5 +- logging/redaction/log_redaction.py | 5 +- logging/redaction/log_redaction_final.py | 5 +- noxfile-template.py | 9 +- .../geospatial-classification/e2e_test.py | 2 + .../geospatial-classification/task.py | 7 +- .../create_images_metadata_table.py | 8 +- .../image-classification/predict.py | 5 +- .../image-classification/train_model.py | 17 +- .../create_datasets.py | 7 +- .../timeseries-classification/data_utils.py | 8 +- .../timeseries-classification/predict.py | 9 +- .../timeseries-classification/trainer.py | 18 +- .../weather-forecasting/create_dataset.py | 3 +- .../serving/weather-model/weather/model.py | 4 +- .../spark-connector/spark_streaming_test.py | 3 +- recaptcha_enterprise/demosite/app/urls.py | 5 +- .../snippets/test_create_assessment.py | 3 +- run/deployment-previews/check_status.py | 4 +- run/deployment-previews/test_app.py | 2 + run/idp-sql/database.py | 2 + run/idp-sql/middleware.py | 5 +- scripts/convert-types.py | 322 ++++++++++++++++++ 167 files changed, 1023 insertions(+), 441 deletions(-) create mode 100644 scripts/convert-types.py diff --git a/appengine/flexible/storage/main.py b/appengine/flexible/storage/main.py index fa04e9b21aef..785e95dd3631 100644 --- a/appengine/flexible/storage/main.py +++ b/appengine/flexible/storage/main.py @@ -13,6 +13,8 @@ # limitations under the License. # [START gae_flex_storage_app] +from __future__ import annotations + import logging import os diff --git a/appengine/flexible_python37_and_earlier/storage/main.py b/appengine/flexible_python37_and_earlier/storage/main.py index 6a20d7c05c57..b7361ccb5ce3 100644 --- a/appengine/flexible_python37_and_earlier/storage/main.py +++ b/appengine/flexible_python37_and_earlier/storage/main.py @@ -13,9 +13,10 @@ # limitations under the License. 
# [START gae_flex_storage_app] +from __future__ import annotations + import logging import os -from typing import Union from flask import Flask, request from google.cloud import storage @@ -68,7 +69,7 @@ def upload() -> str: @app.errorhandler(500) -def server_error(e: Union[Exception, int]) -> str: +def server_error(e: Exception | int) -> str: logging.exception('An error occurred during a request.') return """ An internal error occurred:
{}
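The App Engine storage hunks above show the full pattern this PR applies across all 167 files: add `from __future__ import annotations` (PEP 563), drop the now-redundant `typing` imports, and rewrite the hints with PEP 585 builtin generics and PEP 604 unions. A minimal runnable sketch of the before/after, with illustrative names rather than code from any one sample:

```python
from __future__ import annotations  # PEP 563: hints below are never evaluated,
                                    # so the new syntax parses on Python 3.7+

# Was: def server_error(e: Union[Exception, int]) -> str:
def server_error(e: Exception | int) -> str:
    return f"An internal error occurred: {e}"

# Was: def get_env() -> Dict[str, str]:
def get_env() -> dict[str, str]:
    return {"BUCKET": "my-bucket"}

# Was: def find_bucket(name: Optional[str] = None) -> Optional[str]:
def find_bucket(name: str | None = None) -> str | None:
    return name or get_env().get("BUCKET")

print(server_error(ValueError("boom")), get_env(), find_bucket())
```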
diff --git a/batch/list/list_jobs.py b/batch/list/list_jobs.py index e52f4defe876..8dcf1327ab8e 100644 --- a/batch/list/list_jobs.py +++ b/batch/list/list_jobs.py @@ -13,7 +13,9 @@ # limitations under the License. # [START batch_list_jobs] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import batch_v1 diff --git a/batch/list/list_tasks.py b/batch/list/list_tasks.py index 9ef6674ec8d1..70f3c926ba00 100644 --- a/batch/list/list_tasks.py +++ b/batch/list/list_tasks.py @@ -13,7 +13,9 @@ # limitations under the License. # [START batch_list_tasks] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import batch_v1 diff --git a/batch/logs/read_job_logs.py b/batch/logs/read_job_logs.py index d9c227a1082b..ad3d5c19fb45 100644 --- a/batch/logs/read_job_logs.py +++ b/batch/logs/read_job_logs.py @@ -14,6 +14,8 @@ # [START batch_job_logs] +from __future__ import annotations + from typing import NoReturn from google.cloud import batch_v1 diff --git a/batch/tests/test_basics.py b/batch/tests/test_basics.py index 0998a63c76cb..541509d556cb 100644 --- a/batch/tests/test_basics.py +++ b/batch/tests/test_basics.py @@ -11,8 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from collections.abc import Callable import time -from typing import Callable import uuid from flaky import flaky diff --git a/bigquery/remote-function/translate/main.py b/bigquery/remote-function/translate/main.py index a7466b293ec6..eba7cc4452d5 100644 --- a/bigquery/remote-function/translate/main.py +++ b/bigquery/remote-function/translate/main.py @@ -13,7 +13,8 @@ # limitations under the License. # [START bigquery_remote_function_translation] -from typing import List +from __future__ import annotations + import flask import functions_framework @@ -89,8 +90,8 @@ def extract_project_from_caller(job: str) -> str: def translate_text( - calls: List[str], project: str, target_language_code: str -) -> List[str]: + calls: list[str], project: str, target_language_code: str +) -> list[str]: """Translates the input text to specified language using Translation API. Args: diff --git a/cloud-sql/mysql/client-side-encryption/snippets/cloud_sql_connection_pool_test.py b/cloud-sql/mysql/client-side-encryption/snippets/cloud_sql_connection_pool_test.py index c226ba4f7acc..e6c398ea1213 100644 --- a/cloud-sql/mysql/client-side-encryption/snippets/cloud_sql_connection_pool_test.py +++ b/cloud-sql/mysql/client-side-encryption/snippets/cloud_sql_connection_pool_test.py @@ -26,8 +26,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import os -from typing import Dict import uuid import pytest @@ -40,7 +41,7 @@ @pytest.fixture(name="conn_vars") -def setup() -> Dict[str, str]: +def setup() -> dict[str, str]: try: conn_vars = {} conn_vars["db_user"] = os.environ["MYSQL_USER"] @@ -61,7 +62,7 @@ def setup() -> Dict[str, str]: def test_init_tcp_connection_engine( capsys: pytest.CaptureFixture, - conn_vars: Dict[str, str]) -> None: + conn_vars: dict[str, str]) -> None: init_tcp_connection_engine( db_user=conn_vars["db_user"], @@ -76,7 +77,7 @@ def test_init_tcp_connection_engine( def test_init_unix_connection_engine( capsys: pytest.CaptureFixture, - conn_vars: Dict[str, str]) -> None: + conn_vars: dict[str, str]) -> None: init_unix_connection_engine( db_user=conn_vars["db_user"], @@ -92,7 +93,7 @@ def test_init_unix_connection_engine( def test_init_db( capsys: pytest.CaptureFixture, - conn_vars: Dict[str, str]) -> None: + conn_vars: dict[str, str]) -> None: table_name = f"votes_{uuid.uuid4().hex}" diff --git a/cloud-sql/mysql/sqlalchemy/app.py b/cloud-sql/mysql/sqlalchemy/app.py index 016c5119a5e8..032cbb74e20f 100644 --- a/cloud-sql/mysql/sqlalchemy/app.py +++ b/cloud-sql/mysql/sqlalchemy/app.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import datetime import logging import os -from typing import Dict from flask import Flask, render_template, request, Response @@ -92,7 +93,7 @@ def cast_vote() -> Response: # get_index_context gets data required for rendering HTML application -def get_index_context(db: sqlalchemy.engine.base.Engine) -> Dict: +def get_index_context(db: sqlalchemy.engine.base.Engine) -> dict: votes = [] with db.connect() as conn: diff --git a/cloud-sql/postgres/client-side-encryption/snippets/cloud_sql_connection_pool_test.py b/cloud-sql/postgres/client-side-encryption/snippets/cloud_sql_connection_pool_test.py index b4b251dc165f..3facff15c611 100644 --- a/cloud-sql/postgres/client-side-encryption/snippets/cloud_sql_connection_pool_test.py +++ b/cloud-sql/postgres/client-side-encryption/snippets/cloud_sql_connection_pool_test.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations import os -from typing import Dict import uuid import pytest @@ -27,7 +27,7 @@ @pytest.fixture(name="conn_vars") -def setup() -> Dict[str, str]: +def setup() -> dict[str, str]: try: conn_vars = {} conn_vars["db_user"] = os.environ["POSTGRES_USER"] @@ -48,7 +48,7 @@ def setup() -> Dict[str, str]: def test_init_tcp_connection_engine( capsys: pytest.CaptureFixture, - conn_vars: Dict[str, str]) -> None: + conn_vars: dict[str, str]) -> None: init_tcp_connection_engine( db_user=conn_vars["db_user"], @@ -63,7 +63,7 @@ def test_init_tcp_connection_engine( def test_init_unix_connection_engine( capsys: pytest.CaptureFixture, - conn_vars: Dict[str, str]) -> None: + conn_vars: dict[str, str]) -> None: init_unix_connection_engine( db_user=conn_vars["db_user"], @@ -79,7 +79,7 @@ def test_init_unix_connection_engine( def test_init_db( capsys: pytest.CaptureFixture, - conn_vars: Dict[str, str]) -> None: + conn_vars: dict[str, str]) -> None: table_name = f"votes_{uuid.uuid4().hex}" diff --git a/cloud-sql/postgres/sqlalchemy/app.py b/cloud-sql/postgres/sqlalchemy/app.py index 670f14c16fe1..02c2ffbc9d57 100644 --- a/cloud-sql/postgres/sqlalchemy/app.py +++ b/cloud-sql/postgres/sqlalchemy/app.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import datetime import logging import os -from typing import Dict from flask import Flask, render_template, request, Response @@ -92,7 +93,7 @@ def cast_vote() -> Response: # get_index_context gets data required for rendering HTML application -def get_index_context(db: sqlalchemy.engine.base.Engine) -> Dict: +def get_index_context(db: sqlalchemy.engine.base.Engine) -> dict: votes = [] with db.connect() as conn: diff --git a/cloud-sql/sql-server/client-side-encryption/snippets/cloud_sql_connection_pool_test.py b/cloud-sql/sql-server/client-side-encryption/snippets/cloud_sql_connection_pool_test.py index 664acfdeebe4..1e30e88337bd 100644 --- a/cloud-sql/sql-server/client-side-encryption/snippets/cloud_sql_connection_pool_test.py +++ b/cloud-sql/sql-server/client-side-encryption/snippets/cloud_sql_connection_pool_test.py @@ -12,8 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import os -from typing import Dict import uuid import pytest @@ -25,7 +26,7 @@ @pytest.fixture(name="conn_vars") -def setup() -> Dict[str, str]: +def setup() -> dict[str, str]: try: conn_vars = {} conn_vars["db_user"] = os.environ["SQLSERVER_USER"] @@ -45,7 +46,7 @@ def setup() -> Dict[str, str]: def test_init_tcp_connection_engine( capsys: pytest.CaptureFixture, - conn_vars: Dict[str, str]) -> None: + conn_vars: dict[str, str]) -> None: init_tcp_connection_engine( db_user=conn_vars["db_user"], @@ -60,7 +61,7 @@ def test_init_tcp_connection_engine( def test_init_db( capsys: pytest.CaptureFixture, - conn_vars: Dict[str, str]) -> None: + conn_vars: dict[str, str]) -> None: table_name = f"votes_{uuid.uuid4().hex}" diff --git a/cloud-sql/sql-server/sqlalchemy/app.py b/cloud-sql/sql-server/sqlalchemy/app.py index 2ee47284fcb0..db33ce8a1a13 100644 --- a/cloud-sql/sql-server/sqlalchemy/app.py +++ b/cloud-sql/sql-server/sqlalchemy/app.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import datetime import logging import os -from typing import Dict from flask import Flask, render_template, request, Response import sqlalchemy @@ -91,7 +92,7 @@ def cast_vote() -> Response: return save_vote(db, team) -def get_index_context(db: sqlalchemy.engine.base.Engine) -> Dict: +def get_index_context(db: sqlalchemy.engine.base.Engine) -> dict: votes = [] with db.connect() as conn: # Execute the query and fetch all results diff --git a/composer/cicd_sample/utils/add_dags_to_composer.py b/composer/cicd_sample/utils/add_dags_to_composer.py index 82023e4df38a..8e5698f0ba8f 100644 --- a/composer/cicd_sample/utils/add_dags_to_composer.py +++ b/composer/cicd_sample/utils/add_dags_to_composer.py @@ -13,18 +13,19 @@ # limitations under the License. # [START composer_cicd_add_dags_to_composer_utility] +from __future__ import annotations + import argparse import glob import os from shutil import copytree, ignore_patterns import tempfile -from typing import List, Tuple # Imports the Google Cloud client library from google.cloud import storage -def _create_dags_list(dags_directory: str) -> Tuple[str, List[str]]: +def _create_dags_list(dags_directory: str) -> tuple[str, list[str]]: temp_dir = tempfile.mkdtemp() # ignore non-DAG Python files diff --git a/composer/rest/composer2/composer2_airflow_rest_api.py b/composer/rest/composer2/composer2_airflow_rest_api.py index 1c527d74e0b6..a5202697f5ef 100644 --- a/composer/rest/composer2/composer2_airflow_rest_api.py +++ b/composer/rest/composer2/composer2_airflow_rest_api.py @@ -19,6 +19,8 @@ # [START composer_2_trigger_dag] # [START composer_2_trigger_dag_for_import] +from __future__ import annotations + from typing import Any import google.auth diff --git a/composer/tools/composer_dags.py b/composer/tools/composer_dags.py index 5b6a3d01c383..9a815c478c1d 100644 --- a/composer/tools/composer_dags.py +++ b/composer/tools/composer_dags.py @@ -15,13 +15,15 @@ # limitations under the License. 
"""Standalone script to pause/unpause all the dags in the specific environment.""" +from __future__ import annotations + import argparse import json import logging import re import subprocess import sys -import typing +from typing import Any logging.basicConfig(level=logging.DEBUG, format="%(asctime)s - %(message)s") logger = logging.getLogger(__name__) @@ -36,7 +38,7 @@ class DAG: @staticmethod def get_list_of_dags(project_name: str, environment: str, location: str, sdk_endpoint: str, - airflow_version: typing.Tuple[int]) -> typing.List[str]: + airflow_version: tuple[int]) -> list[str]: """Retrieves the list of dags for particular project.""" sub_command = ("list_dags" if airflow_version < (2, 0, 0) else "dags list") command = ( @@ -60,7 +62,7 @@ def _run_shell_command_locally_once( command: str, command_input: str = None, log_command: bool = True, - ) -> typing.Tuple[int, str]: + ) -> tuple[int, str]: """Executes shell command and returns its output.""" p = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True) @@ -78,7 +80,7 @@ def _run_shell_command_locally_once( @staticmethod def pause_dag(project_name: str, environment: str, location: str, sdk_endpoint: str, dag_id: str, - airflow_version: typing.List[int]) -> str: + airflow_version: list[int]) -> str: """Pause specific DAG in the given environment.""" sub_command = ("pause" if airflow_version < (2, 0, 0) else "dags pause") command = ( @@ -97,7 +99,7 @@ def pause_dag(project_name: str, environment: str, location: str, @staticmethod def unpause_dag(project_name: str, environment: str, location: str, sdk_endpoint: str, dag_id: str, - airflow_version: typing.List[int]) -> str: + airflow_version: list[int]) -> str: """UnPause specific DAG in the given environment.""" sub_command = ("unpause" if airflow_version < (2, 0, 0) else "dags unpause") command = ( @@ -115,7 +117,7 @@ def unpause_dag(project_name: str, environment: str, location: str, @staticmethod def describe_environment(project_name: str, environment: str, location: str, - sdk_endpoint: str) -> typing.Any: + sdk_endpoint: str) -> Any: """Returns the given environment json object to parse necessary details.""" logger.info("*** Fetching details of the environment: %s...", environment) command = ( @@ -175,7 +177,7 @@ def main(project_name: str, return 0 -def parse_arguments() -> typing.Dict[typing.Any, typing.Any]: +def parse_arguments() -> dict[Any, Any]: """Parses command line parameters.""" argument_parser = argparse.ArgumentParser( usage="Script to Pause/UnPause DAGs in Cloud Composer Environment \n") diff --git a/compute/client_library/ingredients/disks/create_kms_encrypted_disk.py b/compute/client_library/ingredients/disks/create_kms_encrypted_disk.py index c115fe10356a..60d534e4dc72 100644 --- a/compute/client_library/ingredients/disks/create_kms_encrypted_disk.py +++ b/compute/client_library/ingredients/disks/create_kms_encrypted_disk.py @@ -16,7 +16,8 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Optional +from __future__ import annotations + from google.api_core.exceptions import BadRequest from google.cloud import compute_v1 @@ -30,8 +31,8 @@ def create_kms_encrypted_disk( disk_type: str, disk_size_gb: int, kms_key_name: str, - disk_link: Optional[str] = None, - image_link: Optional[str] = None + disk_link: str | None = None, + image_link: str | None = None ) -> compute_v1.Disk: """ Creates a zonal disk in a project. 
If you do not provide values for disk_link or image_link, diff --git a/compute/client_library/ingredients/disks/list.py b/compute/client_library/ingredients/disks/list.py index baae94ce9e4d..85aa6adce48b 100644 --- a/compute/client_library/ingredients/disks/list.py +++ b/compute/client_library/ingredients/disks/list.py @@ -16,7 +16,9 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/ingredients/disks/regional_create_from_source.py b/compute/client_library/ingredients/disks/regional_create_from_source.py index af6757d7efbd..c635b3f76fbb 100644 --- a/compute/client_library/ingredients/disks/regional_create_from_source.py +++ b/compute/client_library/ingredients/disks/regional_create_from_source.py @@ -16,7 +16,9 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Iterable, Optional +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 @@ -25,8 +27,8 @@ def create_regional_disk(project_id: str, region: str, replica_zones: Iterable[str], disk_name: str, disk_type: str, disk_size_gb: int, - disk_link: Optional[str] = None, - snapshot_link: Optional[str] = None) -> compute_v1.Disk: + disk_link: str | None = None, + snapshot_link: str | None = None) -> compute_v1.Disk: """ Creates a regional disk from an existing zonal disk in a given project. diff --git a/compute/client_library/ingredients/firewall/list.py b/compute/client_library/ingredients/firewall/list.py index 5deeac4e3b7a..440af12ea633 100644 --- a/compute/client_library/ingredients/firewall/list.py +++ b/compute/client_library/ingredients/firewall/list.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets +# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/ingredients/images/create.py b/compute/client_library/ingredients/images/create.py index b029d7aaa3c1..6a66ede38e5c 100644 --- a/compute/client_library/ingredients/images/create.py +++ b/compute/client_library/ingredients/images/create.py @@ -18,8 +18,9 @@ # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa +from __future__ import annotations + import warnings -from typing import Optional from google.cloud import compute_v1 @@ -31,7 +32,7 @@ def create_image_from_disk(project_id: str, zone: str, source_disk_name: str, image_name: str, - storage_location: Optional[str] = None, force_create: bool = False) -> compute_v1.Image: + storage_location: str | None = None, force_create: bool = False) -> compute_v1.Image: """ Creates a new disk image. 
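The list ingredients above all make the same import swap: PEP 585 deprecated the `typing` aliases for container ABCs, so `Iterable` (and friends such as `Callable`) now come from `collections.abc`. A small self-contained sketch of the resulting style, again with illustrative names:

```python
from __future__ import annotations

from collections.abc import Callable, Iterable

def apply_all(funcs: Iterable[Callable[[int], int]], value: int) -> list[int]:
    # collections.abc generics are subscriptable at runtime from 3.9, and
    # under the __future__ import these hints are never evaluated anyway.
    return [f(value) for f in funcs]

print(apply_all([lambda x: x + 1, lambda x: x * 2], 10))  # [11, 20]
```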
diff --git a/compute/client_library/ingredients/images/create_from_image.py b/compute/client_library/ingredients/images/create_from_image.py index 07b6d1c8e762..30324c22ddeb 100644 --- a/compute/client_library/ingredients/images/create_from_image.py +++ b/compute/client_library/ingredients/images/create_from_image.py @@ -17,16 +17,18 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Optional, Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 # def create_image_from_image(project_id: str, source_image_name: str, image_name: str, - source_project_id: Optional[str] = None, - guest_os_features: Optional[Iterable[str]] = None, - storage_location: Optional[str] = None) -> compute_v1.Image: + source_project_id: str | None = None, + guest_os_features: Iterable[str] | None = None, + storage_location: str | None = None) -> compute_v1.Image: """ Creates a copy of another image. diff --git a/compute/client_library/ingredients/images/create_from_snapshot.py b/compute/client_library/ingredients/images/create_from_snapshot.py index f882e24914bc..a9fb7cc4ad3b 100644 --- a/compute/client_library/ingredients/images/create_from_snapshot.py +++ b/compute/client_library/ingredients/images/create_from_snapshot.py @@ -17,16 +17,18 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Optional, Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 # def create_image_from_snapshot(project_id: str, source_snapshot_name: str, image_name: str, - source_project_id: Optional[str] = None, - guest_os_features: Optional[Iterable[str]] = None, - storage_location: Optional[str] = None) -> compute_v1.Image: + source_project_id: str | None = None, + guest_os_features: Iterable[str] | None = None, + storage_location: str | None = None) -> compute_v1.Image: """ Creates an image based on a snapshot. diff --git a/compute/client_library/ingredients/images/list_images.py b/compute/client_library/ingredients/images/list_images.py index b4c191fc3e22..cc45e11d1d0d 100644 --- a/compute/client_library/ingredients/images/list_images.py +++ b/compute/client_library/ingredients/images/list_images.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets +# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. 
# flake8: noqa -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/ingredients/instance-templates/list.py b/compute/client_library/ingredients/instance-templates/list.py index 851e2c48e508..05a11b0c4093 100644 --- a/compute/client_library/ingredients/instance-templates/list.py +++ b/compute/client_library/ingredients/instance-templates/list.py @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets +# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/ingredients/instances/bulk_insert.py b/compute/client_library/ingredients/instances/bulk_insert.py index d7cf578d41af..ac7e84f48a1b 100644 --- a/compute/client_library/ingredients/instances/bulk_insert.py +++ b/compute/client_library/ingredients/instances/bulk_insert.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # flake8: noqa -from typing import Iterable, Optional +from __future__ import annotations + +from collections.abc import Iterable import uuid from google.cloud import compute_v1 @@ -20,8 +22,8 @@ # def bulk_insert_instance(project_id: str, zone: str, template: compute_v1.InstanceTemplate, - count: int, name_pattern: str, min_count: Optional[int] = None, - labels: Optional[dict] = None) -> Iterable[compute_v1.Instance]: + count: int, name_pattern: str, min_count: int | None = None, + labels: dict | None = None) -> Iterable[compute_v1.Instance]: """ Create multiple VMs based on an Instance Template. The newly created instances will be returned as a list and will share a label with key `bulk_batch` and a random diff --git a/compute/client_library/ingredients/instances/create_instance.py b/compute/client_library/ingredients/instances/create_instance.py index afbbbef330a6..4506aaf7a7a3 100644 --- a/compute/client_library/ingredients/instances/create_instance.py +++ b/compute/client_library/ingredients/instances/create_instance.py @@ -17,8 +17,9 @@ # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. 
# flake8: noqa +from __future__ import annotations + import re -from typing import List import warnings from google.cloud import compute_v1 @@ -29,14 +30,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/ingredients/instances/create_start_instance/create_windows_instance.py b/compute/client_library/ingredients/instances/create_start_instance/create_windows_instance.py index ee437e5c5620..e909c273fc17 100644 --- a/compute/client_library/ingredients/instances/create_start_instance/create_windows_instance.py +++ b/compute/client_library/ingredients/instances/create_start_instance/create_windows_instance.py @@ -16,7 +16,8 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Optional +from __future__ import annotations + from google.cloud import compute_v1 @@ -25,7 +26,7 @@ def create_windows_instance(project_id: str, zone: str, instance_name: str, machine_type: str, source_image_family: str = "windows-2022", network_link: str = "global/networks/default", - subnetwork_link: Optional[str] = None) -> compute_v1.Instance: + subnetwork_link: str | None = None) -> compute_v1.Instance: """ Creates a new Windows Server instance that has only an internal IP address. @@ -58,7 +59,7 @@ def create_windows_instance(project_id: str, zone: str, instance_name: str, ) disk_type = f"zones/{zone}/diskTypes/pd-standard" disks = [disk_from_image(disk_type, 100, True, base_image.self_link, True)] - + # You must verify or configure routes and firewall rules in your VPC network # to allow access to kms.windows.googlecloud.com. # More information about access to kms.windows.googlecloud.com: https://cloud.google.com/compute/docs/instances/windows/creating-managing-windows-instances#kms-server diff --git a/compute/client_library/ingredients/instances/create_start_instance/create_with_existing_disks.py b/compute/client_library/ingredients/instances/create_start_instance/create_with_existing_disks.py index c4b90a62f93b..b285b6a5bdf0 100644 --- a/compute/client_library/ingredients/instances/create_start_instance/create_with_existing_disks.py +++ b/compute/client_library/ingredients/instances/create_start_instance/create_with_existing_disks.py @@ -16,13 +16,14 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import List +from __future__ import annotations + from google.cloud import compute_v1 # -def create_with_existing_disks(project_id: str, zone: str, instance_name: str, disk_names: List[str]) -> compute_v1.Instance: +def create_with_existing_disks(project_id: str, zone: str, instance_name: str, disk_names: list[str]) -> compute_v1.Instance: """ Create a new VM instance using selected disks. The first disk in disk_names will be used as boot disk. 
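One caveat behind all of these hunks, given that the checklist still runs `nox -s py-3.9`: the `__future__` import makes the new syntax safe to write, but code that eagerly evaluates annotations at runtime still hits the version gap for `|` unions. A hedged sketch of the behavior, using a hypothetical function:

```python
from __future__ import annotations

import typing

def create(name: str, subnetwork_link: str | None = None) -> None:
    """PEP 563 stores each hint as a plain string, e.g. 'str | None'."""

# Defining the function works on Python 3.7+ since nothing is evaluated:
print(create.__annotations__["subnetwork_link"])  # str | None

# Eager evaluation re-runs the expression: fine on 3.10+, but on 3.9
# it raises TypeError because type objects there do not implement |.
try:
    print(typing.get_type_hints(create))
except TypeError as err:
    print("evaluating the union needs Python 3.10+:", err)
```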
diff --git a/compute/client_library/ingredients/instances/custom_machine_types/create_extra_mem_no_helper.py b/compute/client_library/ingredients/instances/custom_machine_types/create_extra_mem_no_helper.py index 536455f669d0..90022b04cff1 100644 --- a/compute/client_library/ingredients/instances/custom_machine_types/create_extra_mem_no_helper.py +++ b/compute/client_library/ingredients/instances/custom_machine_types/create_extra_mem_no_helper.py @@ -12,11 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets +# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import List +from __future__ import annotations + from google.cloud import compute_v1 @@ -24,7 +25,7 @@ # def create_custom_instances_extra_mem( project_id: str, zone: str, instance_name: str, core_count: int, memory: int -) -> List[compute_v1.Instance]: +) -> list[compute_v1.Instance]: """ Create 3 new VM instances with extra memory without using a CustomMachineType helper class. diff --git a/compute/client_library/ingredients/instances/custom_machine_types/create_without_helper.py b/compute/client_library/ingredients/instances/custom_machine_types/create_without_helper.py index a17a979bb8fd..9dfbab74fce8 100644 --- a/compute/client_library/ingredients/instances/custom_machine_types/create_without_helper.py +++ b/compute/client_library/ingredients/instances/custom_machine_types/create_without_helper.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets +# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import List +from __future__ import annotations + from google.cloud import compute_v1 @@ -26,7 +27,7 @@ # def create_custom_instances_no_helper( project_id: str, zone: str, instance_name: str, core_count: int, memory: int -) -> List[compute_v1.Instance]: +) -> list[compute_v1.Instance]: """ Create 7 new VM instances without using a CustomMachineType helper function. diff --git a/compute/client_library/ingredients/instances/list.py b/compute/client_library/ingredients/instances/list.py index 089f7fdabb87..3083dfa64145 100644 --- a/compute/client_library/ingredients/instances/list.py +++ b/compute/client_library/ingredients/instances/list.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets +# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. 
# flake8: noqa -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/ingredients/instances/list_all.py b/compute/client_library/ingredients/instances/list_all.py index 60498c4df67f..af82d0b31b13 100644 --- a/compute/client_library/ingredients/instances/list_all.py +++ b/compute/client_library/ingredients/instances/list_all.py @@ -16,8 +16,10 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa +from __future__ import annotations + from collections import defaultdict -from typing import Dict, Iterable +from collections.abc import Iterable from google.cloud import compute_v1 @@ -25,7 +27,7 @@ # def list_all_instances( project_id: str, -) -> Dict[str, Iterable[compute_v1.Instance]]: +) -> dict[str, Iterable[compute_v1.Instance]]: """ Returns a dictionary of all instances present in a project, grouped by their zone. diff --git a/compute/client_library/ingredients/instances/preemptible/preemption_history.py b/compute/client_library/ingredients/instances/preemptible/preemption_history.py index 53b6a3da767f..a152554fc3bc 100644 --- a/compute/client_library/ingredients/instances/preemptible/preemption_history.py +++ b/compute/client_library/ingredients/instances/preemptible/preemption_history.py @@ -12,18 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets +# This is an ingredient file. It is not meant to be run directly. Check the samples/snippets # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa +from __future__ import annotations + import datetime -from typing import List, Tuple # def preemption_history( project_id: str, zone: str, instance_name: str = None -) -> List[Tuple[str, datetime.datetime]]: +) -> list[tuple[str, datetime.datetime]]: """ Get a list of preemption operations from given zone in a project. Optionally limit the results to instance name. diff --git a/compute/client_library/ingredients/operations/handle_extended_operation.py b/compute/client_library/ingredients/operations/handle_extended_operation.py index f44f044e0b41..f19382d56226 100644 --- a/compute/client_library/ingredients/operations/handle_extended_operation.py +++ b/compute/client_library/ingredients/operations/handle_extended_operation.py @@ -16,6 +16,8 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/ingredients/routes/create.py b/compute/client_library/ingredients/routes/create.py index a309150c38c6..d2c585bf0926 100644 --- a/compute/client_library/ingredients/routes/create.py +++ b/compute/client_library/ingredients/routes/create.py @@ -16,16 +16,17 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. 
# flake8: noqa -from typing import Optional +from __future__ import annotations + from google.cloud import compute_v1 # def create_route(project_id: str, network: str, route_name: str, destination_range: str, *, - next_hop_gateway: Optional[str] = None, - next_hop_ip: Optional[str] = None, next_hop_instance: Optional[str] = None, - next_hop_vpn_tunnel: Optional[str] = None, next_hop_ilb: Optional[str] = None) -> compute_v1.Route: + next_hop_gateway: str | None = None, + next_hop_ip: str | None = None, next_hop_instance: str | None = None, + next_hop_vpn_tunnel: str | None = None, next_hop_ilb: str | None = None) -> compute_v1.Route: """ Create a new route in selected network by providing a destination and next hop name. @@ -55,10 +56,10 @@ def create_route(project_id: str, network: str, route_name: str, destination_ran """ excl_args = {next_hop_instance, next_hop_ilb, next_hop_vpn_tunnel, next_hop_gateway, next_hop_ip} args_set = sum(1 if arg is not None else 0 for arg in excl_args) - + if args_set != 1: raise RuntimeError("You must specify exactly one next_hop_* parameter.") - + route = compute_v1.Route() route.name = route_name route.network = network diff --git a/compute/client_library/ingredients/routes/list.py b/compute/client_library/ingredients/routes/list.py index 494f5da6868a..d328411cce93 100644 --- a/compute/client_library/ingredients/routes/list.py +++ b/compute/client_library/ingredients/routes/list.py @@ -16,7 +16,9 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/ingredients/snapshots/create.py b/compute/client_library/ingredients/snapshots/create.py index 9b0e0cac78b5..a81c6da2aaae 100644 --- a/compute/client_library/ingredients/snapshots/create.py +++ b/compute/client_library/ingredients/snapshots/create.py @@ -16,15 +16,16 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. # flake8: noqa -from typing import Optional +from __future__ import annotations + from google.cloud import compute_v1 # def create_snapshot(project_id: str, disk_name: str, snapshot_name: str, *, - zone: Optional[str] = None, region: Optional[str] = None, - location: Optional[str] = None, disk_project_id: Optional[str] = None) -> compute_v1.Snapshot: + zone: str | None = None, region: str | None = None, + location: str | None = None, disk_project_id: str | None = None) -> compute_v1.Snapshot: """ Create a snapshot of a disk. diff --git a/compute/client_library/ingredients/snapshots/list.py b/compute/client_library/ingredients/snapshots/list.py index 01317426ddee..2807bf468bca 100644 --- a/compute/client_library/ingredients/snapshots/list.py +++ b/compute/client_library/ingredients/snapshots/list.py @@ -16,7 +16,9 @@ # folder for complete code samples that are ready to be used. # Disabling flake8 for the ingredients file, as it would fail F821 - undefined name check. 
# flake8: noqa -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/sgs.py b/compute/client_library/sgs.py index e3fa70a8b94c..e8ae517ae704 100644 --- a/compute/client_library/sgs.py +++ b/compute/client_library/sgs.py @@ -15,6 +15,8 @@ This script is used to generate the full code samples inside the `snippets` directory, to be then used in Google Compute Engine public documentation. """ +from __future__ import annotations + import argparse import ast from collections import defaultdict @@ -25,7 +27,6 @@ from pathlib import Path import re import subprocess -from typing import List, Tuple import warnings import isort @@ -73,8 +74,8 @@ class Ingredient: text that will be pasted into the snippet. """ - simple_imports: List[ImportItem] = field(default_factory=list) - imports_from: List[Tuple[str, ImportItem]] = field(default_factory=list) + simple_imports: list[ImportItem] = field(default_factory=list) + imports_from: list[tuple[str, ImportItem]] = field(default_factory=list) text: str = "" name: str = "" @@ -94,7 +95,7 @@ def __repr__(self): ) -def parse_imports(script: str) -> Tuple[List[ImportItem], List[Tuple[str, ImportItem]]]: +def parse_imports(script: str) -> tuple[list[ImportItem], list[tuple[str, ImportItem]]]: """ Reads a Python script file and analyzes it to extract information about the various things it imports. Returns a pair of lists containing diff --git a/compute/client_library/snippets/disks/attach_disk.py b/compute/client_library/snippets/disks/attach_disk.py index 4659c905e783..0bbd025c8d3c 100644 --- a/compute/client_library/snippets/disks/attach_disk.py +++ b/compute/client_library/snippets/disks/attach_disk.py @@ -21,6 +21,8 @@ # [START compute_regional_disk_attach] # [START compute_disk_attach] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/autodelete_change.py b/compute/client_library/snippets/disks/autodelete_change.py index 82671e53e19d..2cca38311d96 100644 --- a/compute/client_library/snippets/disks/autodelete_change.py +++ b/compute/client_library/snippets/disks/autodelete_change.py @@ -20,6 +20,8 @@ # [START compute_disk_autodelete_change] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/clone_encrypted_disk.py b/compute/client_library/snippets/disks/clone_encrypted_disk.py index 04fb7a670eef..00b118042035 100644 --- a/compute/client_library/snippets/disks/clone_encrypted_disk.py +++ b/compute/client_library/snippets/disks/clone_encrypted_disk.py @@ -20,6 +20,8 @@ # [START compute_disk_clone_encrypted_disk] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/clone_encrypted_disk_managed_key.py b/compute/client_library/snippets/disks/clone_encrypted_disk_managed_key.py index 5236f57aa793..c38d116d20f1 100644 --- a/compute/client_library/snippets/disks/clone_encrypted_disk_managed_key.py +++ b/compute/client_library/snippets/disks/clone_encrypted_disk_managed_key.py @@ -20,6 +20,8 @@ # [START compute_disk_clone_encrypted_disk_kms] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/create_empty_disk.py b/compute/client_library/snippets/disks/create_empty_disk.py index d47cc396bdfa..486b86e83480 100644 --- 
a/compute/client_library/snippets/disks/create_empty_disk.py +++ b/compute/client_library/snippets/disks/create_empty_disk.py @@ -20,6 +20,8 @@ # [START compute_disk_create_empty_disk] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/create_from_image.py b/compute/client_library/snippets/disks/create_from_image.py index 1b12cf7fd0ef..04fd0e3ec6be 100644 --- a/compute/client_library/snippets/disks/create_from_image.py +++ b/compute/client_library/snippets/disks/create_from_image.py @@ -20,6 +20,8 @@ # [START compute_disk_create_from_image] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/create_from_snapshot.py b/compute/client_library/snippets/disks/create_from_snapshot.py index 0cd5b6dca4f1..4f8ed23a87dd 100644 --- a/compute/client_library/snippets/disks/create_from_snapshot.py +++ b/compute/client_library/snippets/disks/create_from_snapshot.py @@ -20,6 +20,8 @@ # [START compute_disk_create_from_snapshot] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/create_from_source.py b/compute/client_library/snippets/disks/create_from_source.py index 0dff5a58b570..17f1aff87761 100644 --- a/compute/client_library/snippets/disks/create_from_source.py +++ b/compute/client_library/snippets/disks/create_from_source.py @@ -20,6 +20,8 @@ # [START compute_disk_create_from_disk] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/create_kms_encrypted_disk.py b/compute/client_library/snippets/disks/create_kms_encrypted_disk.py index 58ec81049f18..037344a3f3f4 100644 --- a/compute/client_library/snippets/disks/create_kms_encrypted_disk.py +++ b/compute/client_library/snippets/disks/create_kms_encrypted_disk.py @@ -20,8 +20,10 @@ # [START compute_create_kms_encrypted_disk] +from __future__ import annotations + import sys -from typing import Any, Optional +from typing import Any from google.api_core.exceptions import BadRequest from google.api_core.extended_operation import ExtendedOperation @@ -83,8 +85,8 @@ def create_kms_encrypted_disk( disk_type: str, disk_size_gb: int, kms_key_name: str, - disk_link: Optional[str] = None, - image_link: Optional[str] = None, + disk_link: str | None = None, + image_link: str | None = None, ) -> compute_v1.Disk: """ Creates a zonal disk in a project. 
If you do not provide values for disk_link or image_link, diff --git a/compute/client_library/snippets/disks/delete.py b/compute/client_library/snippets/disks/delete.py index 1d79f68bf3e8..bffdd4da09f1 100644 --- a/compute/client_library/snippets/disks/delete.py +++ b/compute/client_library/snippets/disks/delete.py @@ -20,6 +20,8 @@ # [START compute_disk_delete] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/list.py b/compute/client_library/snippets/disks/list.py index 38cadc145d12..305b0d582c75 100644 --- a/compute/client_library/snippets/disks/list.py +++ b/compute/client_library/snippets/disks/list.py @@ -20,7 +20,9 @@ # [START compute_disk_list] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/disks/regional_create_from_source.py b/compute/client_library/snippets/disks/regional_create_from_source.py index 6a5ad48f0153..7d46b4dbc826 100644 --- a/compute/client_library/snippets/disks/regional_create_from_source.py +++ b/compute/client_library/snippets/disks/regional_create_from_source.py @@ -20,8 +20,11 @@ # [START compute_regional_disk_create_from_disk] +from __future__ import annotations + +from collections.abc import Iterable import sys -from typing import Any, Iterable, Optional +from typing import Any from google.api_core.extended_operation import ExtendedOperation from google.cloud import compute_v1 @@ -82,8 +85,8 @@ def create_regional_disk( disk_name: str, disk_type: str, disk_size_gb: int, - disk_link: Optional[str] = None, - snapshot_link: Optional[str] = None, + disk_link: str | None = None, + snapshot_link: str | None = None, ) -> compute_v1.Disk: """ Creates a regional disk from an existing zonal disk in a given project. 
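The disk snippets above, like `routes/create.py` earlier in this patch, take several `str | None` sources of which exactly one must be provided. With PEP 604 optionals, "unset" is spelled `None`, so the exactly-one-of check reduces to counting non-`None` arguments. A runnable sketch of that check with illustrative names, not the actual compute_v1 call:

```python
from __future__ import annotations

def pick_source(disk_link: str | None = None, snapshot_link: str | None = None) -> str:
    # Same shape as the sum-over-None check in routes/create.py:
    # exactly one of the optional sources may be set.
    sources = [disk_link, snapshot_link]
    if sum(source is not None for source in sources) != 1:
        raise RuntimeError("Specify exactly one of disk_link or snapshot_link.")
    return next(source for source in sources if source is not None)

print(pick_source(snapshot_link="global/snapshots/my-snap"))
```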
diff --git a/compute/client_library/snippets/disks/regional_delete.py b/compute/client_library/snippets/disks/regional_delete.py index bfa4d1bff74c..b509d6bcfd0f 100644 --- a/compute/client_library/snippets/disks/regional_delete.py +++ b/compute/client_library/snippets/disks/regional_delete.py @@ -20,6 +20,8 @@ # [START compute_regional_disk_delete] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/disks/resize_disk.py b/compute/client_library/snippets/disks/resize_disk.py index 1bba000e2f26..ae8d19153846 100644 --- a/compute/client_library/snippets/disks/resize_disk.py +++ b/compute/client_library/snippets/disks/resize_disk.py @@ -21,6 +21,8 @@ # [START compute_regional_disk_resize] # [START compute_disk_resize] +from __future__ import annotations + import re import sys from typing import Any diff --git a/compute/client_library/snippets/firewall/create.py b/compute/client_library/snippets/firewall/create.py index 3880e01bed94..2d8d1fd81af0 100644 --- a/compute/client_library/snippets/firewall/create.py +++ b/compute/client_library/snippets/firewall/create.py @@ -20,6 +20,8 @@ # [START compute_firewall_create] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/firewall/delete.py b/compute/client_library/snippets/firewall/delete.py index 39d583451ec5..48c18c9a324f 100644 --- a/compute/client_library/snippets/firewall/delete.py +++ b/compute/client_library/snippets/firewall/delete.py @@ -20,6 +20,8 @@ # [START compute_firewall_delete] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/firewall/list.py b/compute/client_library/snippets/firewall/list.py index 7a0636ae89cd..66e1ec7d3db7 100644 --- a/compute/client_library/snippets/firewall/list.py +++ b/compute/client_library/snippets/firewall/list.py @@ -20,7 +20,9 @@ # [START compute_firewall_list] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/firewall/main.py b/compute/client_library/snippets/firewall/main.py index 52b34126f2a1..74241271e86f 100644 --- a/compute/client_library/snippets/firewall/main.py +++ b/compute/client_library/snippets/firewall/main.py @@ -19,7 +19,9 @@ # directory and apply your changes there. 
-from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/firewall/patch.py b/compute/client_library/snippets/firewall/patch.py index 0b39d705340e..32d088b4417a 100644 --- a/compute/client_library/snippets/firewall/patch.py +++ b/compute/client_library/snippets/firewall/patch.py @@ -20,6 +20,8 @@ # [START compute_firewall_patch] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/firewall/windows_kms.py b/compute/client_library/snippets/firewall/windows_kms.py index bab5b8149ed4..c93173e59d07 100644 --- a/compute/client_library/snippets/firewall/windows_kms.py +++ b/compute/client_library/snippets/firewall/windows_kms.py @@ -20,6 +20,8 @@ # [START compute_create_egress_rule_windows_activation] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/images/create.py b/compute/client_library/snippets/images/create.py index ff1e6c7cc416..ec718d1b59c1 100644 --- a/compute/client_library/snippets/images/create.py +++ b/compute/client_library/snippets/images/create.py @@ -21,8 +21,10 @@ # [START compute_windows_image_create] # [START compute_images_create] +from __future__ import annotations + import sys -from typing import Any, Optional +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -88,7 +90,7 @@ def create_image_from_disk( zone: str, source_disk_name: str, image_name: str, - storage_location: Optional[str] = None, + storage_location: str | None = None, force_create: bool = False, ) -> compute_v1.Image: """ diff --git a/compute/client_library/snippets/images/create_from_image.py b/compute/client_library/snippets/images/create_from_image.py index 314875ce3d10..d35e9d0e72a7 100644 --- a/compute/client_library/snippets/images/create_from_image.py +++ b/compute/client_library/snippets/images/create_from_image.py @@ -20,8 +20,11 @@ # [START compute_images_create_from_image] +from __future__ import annotations + +from collections.abc import Iterable import sys -from typing import Any, Iterable, Optional +from typing import Any from google.api_core.extended_operation import ExtendedOperation from google.cloud import compute_v1 @@ -79,9 +82,9 @@ def create_image_from_image( project_id: str, source_image_name: str, image_name: str, - source_project_id: Optional[str] = None, - guest_os_features: Optional[Iterable[str]] = None, - storage_location: Optional[str] = None, + source_project_id: str | None = None, + guest_os_features: Iterable[str] | None = None, + storage_location: str | None = None, ) -> compute_v1.Image: """ Creates a copy of another image. 
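The listing snippets swap `typing.Iterable` for `collections.abc.Iterable`, which PEP 585 makes subscriptable at runtime from Python 3.9, and which the future import keeps safe as a string annotation on 3.7 and 3.8. A hypothetical helper showing the two imports working together; `image_names` and its test data are invented for illustration:

```python
from __future__ import annotations

from collections.abc import Iterable  # replaces the deprecated typing.Iterable


def image_names(images: Iterable[dict]) -> list[str]:
    # Accepts any iterable of image-like dicts; only the "name" key is read.
    return [image["name"] for image in images]


print(image_names([{"name": "debian-11"}, {"name": "ubuntu-2204-lts"}]))
# ['debian-11', 'ubuntu-2204-lts']
```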
diff --git a/compute/client_library/snippets/images/create_from_snapshot.py b/compute/client_library/snippets/images/create_from_snapshot.py index f0d7ae472b76..00c38c41bcab 100644 --- a/compute/client_library/snippets/images/create_from_snapshot.py +++ b/compute/client_library/snippets/images/create_from_snapshot.py @@ -20,8 +20,11 @@ # [START compute_images_create_from_snapshot] +from __future__ import annotations + +from collections.abc import Iterable import sys -from typing import Any, Iterable, Optional +from typing import Any from google.api_core.extended_operation import ExtendedOperation from google.cloud import compute_v1 @@ -79,9 +82,9 @@ def create_image_from_snapshot( project_id: str, source_snapshot_name: str, image_name: str, - source_project_id: Optional[str] = None, - guest_os_features: Optional[Iterable[str]] = None, - storage_location: Optional[str] = None, + source_project_id: str | None = None, + guest_os_features: Iterable[str] | None = None, + storage_location: str | None = None, ) -> compute_v1.Image: """ Creates an image based on a snapshot. diff --git a/compute/client_library/snippets/images/delete.py b/compute/client_library/snippets/images/delete.py index d5b504337e02..9cee11334f17 100644 --- a/compute/client_library/snippets/images/delete.py +++ b/compute/client_library/snippets/images/delete.py @@ -20,6 +20,8 @@ # [START compute_images_delete] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/images/list.py b/compute/client_library/snippets/images/list.py index 3618295331fa..956274deaadb 100644 --- a/compute/client_library/snippets/images/list.py +++ b/compute/client_library/snippets/images/list.py @@ -20,7 +20,9 @@ # [START compute_images_get_list] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/images/set_deprecation_status.py b/compute/client_library/snippets/images/set_deprecation_status.py index bcf4bb764a85..f4b3db45a108 100644 --- a/compute/client_library/snippets/images/set_deprecation_status.py +++ b/compute/client_library/snippets/images/set_deprecation_status.py @@ -20,6 +20,8 @@ # [START compute_images_set_deprecation_status] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instance_templates/create.py b/compute/client_library/snippets/instance_templates/create.py index 83937803335b..ca5d4479f6f1 100644 --- a/compute/client_library/snippets/instance_templates/create.py +++ b/compute/client_library/snippets/instance_templates/create.py @@ -20,6 +20,8 @@ # [START compute_template_create] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instance_templates/create_from_instance.py b/compute/client_library/snippets/instance_templates/create_from_instance.py index 7ad0996315ad..729c157b9e09 100644 --- a/compute/client_library/snippets/instance_templates/create_from_instance.py +++ b/compute/client_library/snippets/instance_templates/create_from_instance.py @@ -20,6 +20,8 @@ # [START compute_template_create_from_instance] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instance_templates/create_with_subnet.py b/compute/client_library/snippets/instance_templates/create_with_subnet.py index 7166a6d6a59a..6e1d79f50b1f 100644 --- 
a/compute/client_library/snippets/instance_templates/create_with_subnet.py +++ b/compute/client_library/snippets/instance_templates/create_with_subnet.py @@ -20,6 +20,8 @@ # [START compute_template_create_with_subnet] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instance_templates/delete.py b/compute/client_library/snippets/instance_templates/delete.py index 6c2176f1e457..8a11acb595d6 100644 --- a/compute/client_library/snippets/instance_templates/delete.py +++ b/compute/client_library/snippets/instance_templates/delete.py @@ -20,6 +20,8 @@ # [START compute_template_delete] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instance_templates/list.py b/compute/client_library/snippets/instance_templates/list.py index 495686c62d83..4c19d8d7ae97 100644 --- a/compute/client_library/snippets/instance_templates/list.py +++ b/compute/client_library/snippets/instance_templates/list.py @@ -20,7 +20,9 @@ # [START compute_template_list] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/instances/bulk_insert.py b/compute/client_library/snippets/instances/bulk_insert.py index 42612cc04083..6c5ae97526d9 100644 --- a/compute/client_library/snippets/instances/bulk_insert.py +++ b/compute/client_library/snippets/instances/bulk_insert.py @@ -20,8 +20,11 @@ # [START compute_instances_bulk_insert] +from __future__ import annotations + +from collections.abc import Iterable import sys -from typing import Any, Iterable, Optional +from typing import Any import uuid from google.api_core.extended_operation import ExtendedOperation @@ -100,8 +103,8 @@ def bulk_insert_instance( template: compute_v1.InstanceTemplate, count: int, name_pattern: str, - min_count: Optional[int] = None, - labels: Optional[dict] = None, + min_count: int | None = None, + labels: dict | None = None, ) -> Iterable[compute_v1.Instance]: """ Create multiple VMs based on an Instance Template. 
The newly created instances will diff --git a/compute/client_library/snippets/instances/change_machine_type.py b/compute/client_library/snippets/instances/change_machine_type.py index f4a45fbf1fcd..c6fc76462b16 100644 --- a/compute/client_library/snippets/instances/change_machine_type.py +++ b/compute/client_library/snippets/instances/change_machine_type.py @@ -20,6 +20,8 @@ # [START compute_change_machine_type] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/create.py b/compute/client_library/snippets/instances/create.py index a3c89e091975..5870636fae79 100644 --- a/compute/client_library/snippets/instances/create.py +++ b/compute/client_library/snippets/instances/create.py @@ -20,9 +20,11 @@ # [START compute_instances_create] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/create_start_instance/create_from_custom_image.py b/compute/client_library/snippets/instances/create_start_instance/create_from_custom_image.py index 18922c5a25da..941a4f765819 100644 --- a/compute/client_library/snippets/instances/create_start_instance/create_from_custom_image.py +++ b/compute/client_library/snippets/instances/create_start_instance/create_from_custom_image.py @@ -20,9 +20,11 @@ # [START compute_instances_create_from_custom_image] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/create_start_instance/create_from_public_image.py b/compute/client_library/snippets/instances/create_start_instance/create_from_public_image.py index fabfa544ca9d..e5e857fb962f 100644 --- a/compute/client_library/snippets/instances/create_start_instance/create_from_public_image.py +++ b/compute/client_library/snippets/instances/create_start_instance/create_from_public_image.py @@ -20,9 +20,11 @@ # [START compute_instances_create_from_image] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import 
ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/create_start_instance/create_from_snapshot.py b/compute/client_library/snippets/instances/create_start_instance/create_from_snapshot.py index 9a4b3cc7f63a..3d678af628b7 100644 --- a/compute/client_library/snippets/instances/create_start_instance/create_from_snapshot.py +++ b/compute/client_library/snippets/instances/create_start_instance/create_from_snapshot.py @@ -20,9 +20,11 @@ # [START compute_instances_create_from_snapshot] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -118,14 +120,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/create_start_instance/create_windows_instance.py b/compute/client_library/snippets/instances/create_start_instance/create_windows_instance.py index e346622674ed..22d5801dcbca 100644 --- a/compute/client_library/snippets/instances/create_start_instance/create_windows_instance.py +++ b/compute/client_library/snippets/instances/create_start_instance/create_windows_instance.py @@ -21,9 +21,11 @@ # [START compute_create_windows_instance_external_ip] # [START compute_create_windows_instance_internal_ip] +from __future__ import annotations + import re import sys -from typing import Any, List, Optional +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -137,14 +139,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", @@ -272,7 +274,7 @@ def create_windows_instance( machine_type: str, source_image_family: str = "windows-2022", network_link: str = "global/networks/default", - subnetwork_link: Optional[str] = None, + subnetwork_link: str | None = None, ) -> compute_v1.Instance: """ Creates a new Windows Server instance that has only an internal IP address. 
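Note the split visible in these hunks: explicitly spelled `Optional[str]` parameters (such as `subnetwork_link` in `create_windows_instance`) become `str | None`, while implicitly optional ones (`subnetwork_link: str = None` inside `create_instance`) are left untouched, since the implicit form contains no `typing` syntax for a textual rewrite to catch. PEP 484 deprecates that implicit form and strict type checkers flag it; the explicit spelling would look like this hypothetical stub (name and default value are invented):

```python
from __future__ import annotations


def attach_to_subnet(instance_name: str, subnetwork_link: str | None = None) -> str:
    # Explicit `str | None` instead of the implicit-optional `str = None`.
    subnet = subnetwork_link if subnetwork_link is not None else "global/networks/default"
    return f"{instance_name} -> {subnet}"
```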
diff --git a/compute/client_library/snippets/instances/create_start_instance/create_with_additional_disk.py b/compute/client_library/snippets/instances/create_start_instance/create_with_additional_disk.py index 188f19664538..7727a3440fe2 100644 --- a/compute/client_library/snippets/instances/create_start_instance/create_with_additional_disk.py +++ b/compute/client_library/snippets/instances/create_start_instance/create_with_additional_disk.py @@ -20,9 +20,11 @@ # [START compute_instances_create_from_image_plus_empty_disk] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -166,14 +168,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/create_start_instance/create_with_existing_disks.py b/compute/client_library/snippets/instances/create_start_instance/create_with_existing_disks.py index 4e0a1b280d10..792421d9fc1c 100644 --- a/compute/client_library/snippets/instances/create_start_instance/create_with_existing_disks.py +++ b/compute/client_library/snippets/instances/create_start_instance/create_with_existing_disks.py @@ -20,9 +20,11 @@ # [START compute_instances_create_with_existing_disks] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -94,14 +96,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", @@ -223,7 +225,7 @@ def create_instance( def create_with_existing_disks( - project_id: str, zone: str, instance_name: str, disk_names: List[str] + project_id: str, zone: str, instance_name: str, disk_names: list[str] ) -> compute_v1.Instance: """ Create a new VM instance using selected disks. 
The first disk in disk_names will diff --git a/compute/client_library/snippets/instances/create_start_instance/create_with_local_ssd.py b/compute/client_library/snippets/instances/create_start_instance/create_with_local_ssd.py index c4769b8b1652..adfb3c76791c 100644 --- a/compute/client_library/snippets/instances/create_start_instance/create_with_local_ssd.py +++ b/compute/client_library/snippets/instances/create_start_instance/create_with_local_ssd.py @@ -20,9 +20,11 @@ # [START compute_instances_create_with_local_ssd] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -156,14 +158,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/create_start_instance/create_with_snapshotted_data_disk.py b/compute/client_library/snippets/instances/create_start_instance/create_with_snapshotted_data_disk.py index 74ba8857b726..b4435e368846 100644 --- a/compute/client_library/snippets/instances/create_start_instance/create_with_snapshotted_data_disk.py +++ b/compute/client_library/snippets/instances/create_start_instance/create_with_snapshotted_data_disk.py @@ -20,9 +20,11 @@ # [START compute_instances_create_from_image_plus_snapshot_disk] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -173,14 +175,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/create_with_subnet.py b/compute/client_library/snippets/instances/create_with_subnet.py index 29a650462540..8e8f7d75909f 100644 --- a/compute/client_library/snippets/instances/create_with_subnet.py +++ b/compute/client_library/snippets/instances/create_with_subnet.py @@ -20,9 +20,11 @@ # [START compute_instances_create_with_subnet] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, 
external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/custom_hostname/create.py b/compute/client_library/snippets/instances/custom_hostname/create.py index 382ef4448f43..31673e62b919 100644 --- a/compute/client_library/snippets/instances/custom_hostname/create.py +++ b/compute/client_library/snippets/instances/custom_hostname/create.py @@ -20,9 +20,11 @@ # [START compute_instances_create_custom_hostname] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/custom_machine_types/create_shared_with_helper.py b/compute/client_library/snippets/instances/custom_machine_types/create_shared_with_helper.py index bb846dc83ad5..eae9288034ae 100644 --- a/compute/client_library/snippets/instances/custom_machine_types/create_shared_with_helper.py +++ b/compute/client_library/snippets/instances/custom_machine_types/create_shared_with_helper.py @@ -20,12 +20,14 @@ # [START compute_custom_machine_type_create_shared_with_helper] +from __future__ import annotations + from collections import namedtuple from enum import Enum from enum import unique import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -330,14 +332,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/custom_machine_types/create_with_helper.py b/compute/client_library/snippets/instances/custom_machine_types/create_with_helper.py index 161c21102db4..ca6d7365d61b 100644 --- a/compute/client_library/snippets/instances/custom_machine_types/create_with_helper.py +++ b/compute/client_library/snippets/instances/custom_machine_types/create_with_helper.py @@ -20,12 +20,14 @@ # [START compute_custom_machine_type_create_with_helper] +from __future__ import annotations + from collections import namedtuple from enum import Enum from enum import unique import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -330,14 
+332,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/custom_machine_types/create_without_helper.py b/compute/client_library/snippets/instances/custom_machine_types/create_without_helper.py index f2b478d53811..d58333df3d2d 100644 --- a/compute/client_library/snippets/instances/custom_machine_types/create_without_helper.py +++ b/compute/client_library/snippets/instances/custom_machine_types/create_without_helper.py @@ -20,9 +20,11 @@ # [START compute_custom_machine_type_create_without_helper] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", @@ -266,7 +268,7 @@ def create_instance( def create_custom_instances_no_helper( project_id: str, zone: str, instance_name: str, core_count: int, memory: int -) -> List[compute_v1.Instance]: +) -> list[compute_v1.Instance]: """ Create 7 new VM instances without using a CustomMachineType helper function. 
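Taken together, the hunks apply a small, mechanical set of substitutions. Summarized as a hypothetical lookup table (the placeholders X, K, V, A, B, R stand for arbitrary type arguments; no such table exists in the patch itself):

```python
# Substitutions recurring throughout this patch, per PEP 585 and PEP 604:
PEP_585_604_REPLACEMENTS = {
    "typing.List[X]": "list[X]",
    "typing.Dict[K, V]": "dict[K, V]",
    "typing.Tuple[A, B]": "tuple[A, B]",
    "typing.Set[X]": "set[X]",
    "typing.Iterable[X]": "collections.abc.Iterable[X]",
    "typing.Callable[..., R]": "collections.abc.Callable[..., R]",
    "typing.Optional[X]": "X | None",
    "typing.Union[X, Y]": "X | Y",
}

for old, new in PEP_585_604_REPLACEMENTS.items():
    print(f"{old:35} -> {new}")
```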
diff --git a/compute/client_library/snippets/instances/custom_machine_types/extra_mem_no_helper.py b/compute/client_library/snippets/instances/custom_machine_types/extra_mem_no_helper.py index 53b96ae649cd..8bb2c80683ac 100644 --- a/compute/client_library/snippets/instances/custom_machine_types/extra_mem_no_helper.py +++ b/compute/client_library/snippets/instances/custom_machine_types/extra_mem_no_helper.py @@ -20,9 +20,11 @@ # [START compute_custom_machine_type_extra_mem_no_helper] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", @@ -266,7 +268,7 @@ def create_instance( def create_custom_instances_extra_mem( project_id: str, zone: str, instance_name: str, core_count: int, memory: int -) -> List[compute_v1.Instance]: +) -> list[compute_v1.Instance]: """ Create 3 new VM instances with extra memory without using a CustomMachineType helper class. diff --git a/compute/client_library/snippets/instances/custom_machine_types/update_memory.py b/compute/client_library/snippets/instances/custom_machine_types/update_memory.py index 33ff754a5d58..e2019721a4a9 100644 --- a/compute/client_library/snippets/instances/custom_machine_types/update_memory.py +++ b/compute/client_library/snippets/instances/custom_machine_types/update_memory.py @@ -20,6 +20,8 @@ # [START compute_custom_machine_type_update_memory] +from __future__ import annotations + import sys import time from typing import Any diff --git a/compute/client_library/snippets/instances/delete.py b/compute/client_library/snippets/instances/delete.py index 23c393aba754..d94e61b9a784 100644 --- a/compute/client_library/snippets/instances/delete.py +++ b/compute/client_library/snippets/instances/delete.py @@ -20,6 +20,8 @@ # [START compute_instances_delete] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/delete_protection/create.py b/compute/client_library/snippets/instances/delete_protection/create.py index 8a0b483d6662..d8713dd6a5a6 100644 --- a/compute/client_library/snippets/instances/delete_protection/create.py +++ b/compute/client_library/snippets/instances/delete_protection/create.py @@ -20,9 +20,11 @@ # [START compute_delete_protection_create] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + 
accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/delete_protection/set.py b/compute/client_library/snippets/instances/delete_protection/set.py index dd4029ef5e13..cf47a7e52bb2 100644 --- a/compute/client_library/snippets/instances/delete_protection/set.py +++ b/compute/client_library/snippets/instances/delete_protection/set.py @@ -20,6 +20,8 @@ # [START compute_delete_protection_set] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/from_instance_template/create_from_template.py b/compute/client_library/snippets/instances/from_instance_template/create_from_template.py index ed30732125d9..83c9f5949bb3 100644 --- a/compute/client_library/snippets/instances/from_instance_template/create_from_template.py +++ b/compute/client_library/snippets/instances/from_instance_template/create_from_template.py @@ -20,6 +20,8 @@ # [START compute_instances_create_from_template] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/from_instance_template/create_from_template_with_overrides.py b/compute/client_library/snippets/instances/from_instance_template/create_from_template_with_overrides.py index 1181dde5397e..759a641a8b01 100644 --- a/compute/client_library/snippets/instances/from_instance_template/create_from_template_with_overrides.py +++ b/compute/client_library/snippets/instances/from_instance_template/create_from_template_with_overrides.py @@ -20,6 +20,8 @@ # [START compute_instances_create_from_template_with_overrides] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/list.py b/compute/client_library/snippets/instances/list.py index 45830c72ea48..45b95dc56922 100644 --- a/compute/client_library/snippets/instances/list.py +++ b/compute/client_library/snippets/instances/list.py @@ -20,7 +20,9 @@ # [START compute_instances_list] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/instances/list_all.py b/compute/client_library/snippets/instances/list_all.py index 47302fe423a6..e584a61cef10 100644 --- a/compute/client_library/snippets/instances/list_all.py +++ b/compute/client_library/snippets/instances/list_all.py @@ -20,15 +20,17 @@ # [START compute_instances_list_all] +from __future__ import annotations + from collections import defaultdict -from typing import Dict, Iterable +from collections.abc import Iterable from google.cloud import compute_v1 def list_all_instances( project_id: str, -) -> Dict[str, Iterable[compute_v1.Instance]]: +) -> dict[str, Iterable[compute_v1.Instance]]: """ Returns a dictionary of all instances present in a project, grouped by their zone. 
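`list_all_instances` is the one spot where a `Dict` return type meets `defaultdict`; since `defaultdict` subclasses `dict`, the rewritten `dict[str, Iterable[...]]` annotation still holds. A self-contained, hypothetical analogue of the grouping pattern (`group_by_zone` and its data are invented):

```python
from __future__ import annotations

from collections import defaultdict
from collections.abc import Iterable


def group_by_zone(pairs: Iterable[tuple[str, str]]) -> dict[str, list[str]]:
    grouped: dict[str, list[str]] = defaultdict(list)
    for zone, instance in pairs:
        grouped[zone].append(instance)
    return dict(grouped)  # hand back a plain dict, matching the annotation


print(group_by_zone([("us-central1-a", "vm-1"), ("us-central1-a", "vm-2")]))
# {'us-central1-a': ['vm-1', 'vm-2']}
```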
diff --git a/compute/client_library/snippets/instances/preemptible/create_preemptible.py b/compute/client_library/snippets/instances/preemptible/create_preemptible.py index 12d18f7fde97..7fa2faee8111 100644 --- a/compute/client_library/snippets/instances/preemptible/create_preemptible.py +++ b/compute/client_library/snippets/instances/preemptible/create_preemptible.py @@ -20,9 +20,11 @@ # [START compute_preemptible_create] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/preemptible/preemption_history.py b/compute/client_library/snippets/instances/preemptible/preemption_history.py index 23a1f79745d5..702f9a8e6b50 100644 --- a/compute/client_library/snippets/instances/preemptible/preemption_history.py +++ b/compute/client_library/snippets/instances/preemptible/preemption_history.py @@ -20,8 +20,9 @@ # [START compute_preemptible_history] +from __future__ import annotations + import datetime -from typing import List, Tuple from google.cloud import compute_v1 from google.cloud.compute_v1.services.zone_operations import pagers @@ -52,7 +53,7 @@ def list_zone_operations( def preemption_history( project_id: str, zone: str, instance_name: str = None -) -> List[Tuple[str, datetime.datetime]]: +) -> list[tuple[str, datetime.datetime]]: """ Get a list of preemption operations from given zone in a project. Optionally limit the results to instance name. 
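`preemption_history` shows the nested case: `List[Tuple[str, datetime.datetime]]` collapses to `list[tuple[str, datetime.datetime]]`, leaving no `typing` import in the file at all. A hypothetical consumer of that return type (function and data are invented):

```python
from __future__ import annotations

import datetime


def newest_event(events: list[tuple[str, datetime.datetime]]) -> tuple[str, datetime.datetime]:
    # The built-in generics nest just like their typing.* counterparts did.
    return max(events, key=lambda event: event[1])


print(newest_event([
    ("compute.instances.preempted", datetime.datetime(2023, 5, 1, 12, 0)),
    ("compute.instances.preempted", datetime.datetime(2023, 5, 2, 9, 30)),
]))
```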
diff --git a/compute/client_library/snippets/instances/reset.py b/compute/client_library/snippets/instances/reset.py index 59574970743c..c7441926a596 100644 --- a/compute/client_library/snippets/instances/reset.py +++ b/compute/client_library/snippets/instances/reset.py @@ -20,6 +20,8 @@ # [START compute_reset_instance] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/resume.py b/compute/client_library/snippets/instances/resume.py index fd7b5961828d..1b088165c463 100644 --- a/compute/client_library/snippets/instances/resume.py +++ b/compute/client_library/snippets/instances/resume.py @@ -20,6 +20,8 @@ # [START compute_resume_instance] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/spot/create.py b/compute/client_library/snippets/instances/spot/create.py index a46d3aaf7bd1..2ae3ac7cb053 100644 --- a/compute/client_library/snippets/instances/spot/create.py +++ b/compute/client_library/snippets/instances/spot/create.py @@ -20,9 +20,11 @@ # [START compute_spot_create] +from __future__ import annotations + import re import sys -from typing import Any, List +from typing import Any import warnings from google.api_core.extended_operation import ExtendedOperation @@ -136,14 +138,14 @@ def create_instance( project_id: str, zone: str, instance_name: str, - disks: List[compute_v1.AttachedDisk], + disks: list[compute_v1.AttachedDisk], machine_type: str = "n1-standard-1", network_link: str = "global/networks/default", subnetwork_link: str = None, internal_ip: str = None, external_access: bool = False, external_ipv4: str = None, - accelerators: List[compute_v1.AcceleratorConfig] = None, + accelerators: list[compute_v1.AcceleratorConfig] = None, preemptible: bool = False, spot: bool = False, instance_termination_action: str = "STOP", diff --git a/compute/client_library/snippets/instances/start.py b/compute/client_library/snippets/instances/start.py index 942773d512e2..0aea840dce54 100644 --- a/compute/client_library/snippets/instances/start.py +++ b/compute/client_library/snippets/instances/start.py @@ -20,6 +20,8 @@ # [START compute_start_instance] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/start_encrypted.py b/compute/client_library/snippets/instances/start_encrypted.py index c71faa56d5fe..5127a35b7d49 100644 --- a/compute/client_library/snippets/instances/start_encrypted.py +++ b/compute/client_library/snippets/instances/start_encrypted.py @@ -20,6 +20,8 @@ # [START compute_start_enc_instance] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/stop.py b/compute/client_library/snippets/instances/stop.py index ccc7dd62165f..9f5fc7955e75 100644 --- a/compute/client_library/snippets/instances/stop.py +++ b/compute/client_library/snippets/instances/stop.py @@ -20,6 +20,8 @@ # [START compute_stop_instance] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/instances/suspend.py b/compute/client_library/snippets/instances/suspend.py index 623ce1fc1b7c..496f7a6843c9 100644 --- a/compute/client_library/snippets/instances/suspend.py +++ b/compute/client_library/snippets/instances/suspend.py @@ -20,6 +20,8 @@ # [START compute_suspend_instance] +from __future__ import annotations + import sys from typing import Any diff --git 
a/compute/client_library/snippets/operations/wait_for_extended_operation.py b/compute/client_library/snippets/operations/wait_for_extended_operation.py index c3778b271801..ffe7aabaf793 100644 --- a/compute/client_library/snippets/operations/wait_for_extended_operation.py +++ b/compute/client_library/snippets/operations/wait_for_extended_operation.py @@ -20,6 +20,8 @@ # [START compute_operation_extended_wait] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/routes/create.py b/compute/client_library/snippets/routes/create.py index 90194d550439..90a367f85a3e 100644 --- a/compute/client_library/snippets/routes/create.py +++ b/compute/client_library/snippets/routes/create.py @@ -20,8 +20,10 @@ # [START compute_route_create] +from __future__ import annotations + import sys -from typing import Any, Optional +from typing import Any from google.api_core.extended_operation import ExtendedOperation from google.cloud import compute_v1 @@ -81,11 +83,11 @@ def create_route( route_name: str, destination_range: str, *, - next_hop_gateway: Optional[str] = None, - next_hop_ip: Optional[str] = None, - next_hop_instance: Optional[str] = None, - next_hop_vpn_tunnel: Optional[str] = None, - next_hop_ilb: Optional[str] = None, + next_hop_gateway: str | None = None, + next_hop_ip: str | None = None, + next_hop_instance: str | None = None, + next_hop_vpn_tunnel: str | None = None, + next_hop_ilb: str | None = None, ) -> compute_v1.Route: """ Create a new route in selected network by providing a destination and next hop name. diff --git a/compute/client_library/snippets/routes/create_kms_route.py b/compute/client_library/snippets/routes/create_kms_route.py index 283f40308db0..e16d3f58bd22 100644 --- a/compute/client_library/snippets/routes/create_kms_route.py +++ b/compute/client_library/snippets/routes/create_kms_route.py @@ -32,8 +32,10 @@ # [START compute_create_route_windows_activation] +from __future__ import annotations + import sys -from typing import Any, Optional +from typing import Any from google.api_core.extended_operation import ExtendedOperation from google.cloud import compute_v1 @@ -93,11 +95,11 @@ def create_route( route_name: str, destination_range: str, *, - next_hop_gateway: Optional[str] = None, - next_hop_ip: Optional[str] = None, - next_hop_instance: Optional[str] = None, - next_hop_vpn_tunnel: Optional[str] = None, - next_hop_ilb: Optional[str] = None, + next_hop_gateway: str | None = None, + next_hop_ip: str | None = None, + next_hop_instance: str | None = None, + next_hop_vpn_tunnel: str | None = None, + next_hop_ilb: str | None = None, ) -> compute_v1.Route: """ Create a new route in selected network by providing a destination and next hop name. 
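Both route-creation snippets declare their `next_hop_*` parameters keyword-only (note the bare `*`) with `str | None = None` defaults. A hypothetical validator in the same spirit; the exactly-one rule here is illustrative and not necessarily the samples' behavior:

```python
from __future__ import annotations


def pick_next_hop(
    *,  # keyword-only, as in create_route()
    next_hop_gateway: str | None = None,
    next_hop_ip: str | None = None,
    next_hop_instance: str | None = None,
) -> str:
    hops = [hop for hop in (next_hop_gateway, next_hop_ip, next_hop_instance) if hop is not None]
    if len(hops) != 1:
        raise ValueError("Provide exactly one next hop.")
    return hops[0]


print(pick_next_hop(next_hop_ip="10.0.0.1"))
```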
diff --git a/compute/client_library/snippets/routes/delete.py b/compute/client_library/snippets/routes/delete.py index f626e5903560..38424a4db428 100644 --- a/compute/client_library/snippets/routes/delete.py +++ b/compute/client_library/snippets/routes/delete.py @@ -20,6 +20,8 @@ # [START compute_route_delete] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/routes/list.py b/compute/client_library/snippets/routes/list.py index b4f83fb07d38..55f0182a8578 100644 --- a/compute/client_library/snippets/routes/list.py +++ b/compute/client_library/snippets/routes/list.py @@ -20,7 +20,9 @@ # [START compute_route_list] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/snapshots/create.py b/compute/client_library/snippets/snapshots/create.py index 73cb27088b99..b190cfadcfdf 100644 --- a/compute/client_library/snippets/snapshots/create.py +++ b/compute/client_library/snippets/snapshots/create.py @@ -20,8 +20,10 @@ # [START compute_snapshot_create] +from __future__ import annotations + import sys -from typing import Any, Optional +from typing import Any from google.api_core.extended_operation import ExtendedOperation from google.cloud import compute_v1 @@ -80,10 +82,10 @@ def create_snapshot( disk_name: str, snapshot_name: str, *, - zone: Optional[str] = None, - region: Optional[str] = None, - location: Optional[str] = None, - disk_project_id: Optional[str] = None, + zone: str | None = None, + region: str | None = None, + location: str | None = None, + disk_project_id: str | None = None, ) -> compute_v1.Snapshot: """ Create a snapshot of a disk. diff --git a/compute/client_library/snippets/snapshots/delete.py b/compute/client_library/snippets/snapshots/delete.py index aa99db709cd4..bcb42dd613ec 100644 --- a/compute/client_library/snippets/snapshots/delete.py +++ b/compute/client_library/snippets/snapshots/delete.py @@ -20,6 +20,8 @@ # [START compute_snapshot_delete] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/client_library/snippets/snapshots/delete_by_filter.py b/compute/client_library/snippets/snapshots/delete_by_filter.py index 6746622cc826..01f3a059a8d0 100644 --- a/compute/client_library/snippets/snapshots/delete_by_filter.py +++ b/compute/client_library/snippets/snapshots/delete_by_filter.py @@ -21,8 +21,11 @@ # [START compute_snapshot_delete_by_filter] +from __future__ import annotations + +from collections.abc import Iterable import sys -from typing import Any, Iterable +from typing import Any from google.api_core.extended_operation import ExtendedOperation from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/snapshots/list.py b/compute/client_library/snippets/snapshots/list.py index f7db91dbbdcb..76586f0a508d 100644 --- a/compute/client_library/snippets/snapshots/list.py +++ b/compute/client_library/snippets/snapshots/list.py @@ -20,7 +20,9 @@ # [START compute_snapshot_list] -from typing import Iterable +from __future__ import annotations + +from collections.abc import Iterable from google.cloud import compute_v1 diff --git a/compute/client_library/snippets/usage_report/usage_reports.py b/compute/client_library/snippets/usage_report/usage_reports.py index 5b65a0a04f76..257d8a092d24 100644 --- a/compute/client_library/snippets/usage_report/usage_reports.py +++ 
b/compute/client_library/snippets/usage_report/usage_reports.py @@ -26,6 +26,8 @@ # [START compute_usage_report_set] # [START compute_usage_report_get] # [START compute_usage_report_disable] +from __future__ import annotations + import sys from typing import Any diff --git a/compute/load_balancing/create_certificate.py b/compute/load_balancing/create_certificate.py index 00924ce5815b..ad0ba1bdb652 100644 --- a/compute/load_balancing/create_certificate.py +++ b/compute/load_balancing/create_certificate.py @@ -13,15 +13,16 @@ # limitations under the License. # [START compute_certificate_create] +from __future__ import annotations + from pathlib import Path from pprint import pprint -from typing import Union from googleapiclient import discovery def create_certificate( - project_id: str, certificate_file: Union[str, Path], private_key_file: Union[str, Path], certificate_name: str, description: str = "Certificate created from a code sample." + project_id: str, certificate_file: str | Path, private_key_file: str | Path, certificate_name: str, description: str = "Certificate created from a code sample." ) -> None: """ Create a global SSL self-signed certificate within your Google Cloud project. diff --git a/compute/load_balancing/create_regional_certificate.py b/compute/load_balancing/create_regional_certificate.py index 172b1e36a47a..f3dcef8ca988 100644 --- a/compute/load_balancing/create_regional_certificate.py +++ b/compute/load_balancing/create_regional_certificate.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from pathlib import Path # [START compute_certificate_create_regional] from pprint import pprint -from typing import Union from googleapiclient import discovery @@ -23,8 +24,8 @@ def create_regional_certificate( project_id: str, region: str, - certificate_file: Union[str, Path], - private_key_file: Union[str, Path], + certificate_file: str | Path, + private_key_file: str | Path, certificate_name: str, description: str = "Certificate created from a code sample." ) -> None: diff --git a/compute/oslogin/oslogin_service_account_ssh.py b/compute/oslogin/oslogin_service_account_ssh.py index 795c37830240..0ac8f125a25d 100644 --- a/compute/oslogin/oslogin_service_account_ssh.py +++ b/compute/oslogin/oslogin_service_account_ssh.py @@ -20,10 +20,11 @@ instance over SSH. This example uses zonal DNS names to address instances on the same internal VPC network. """ +from __future__ import annotations + import argparse import subprocess import time -from typing import List, Optional, Tuple import uuid from google.cloud import oslogin_v1 @@ -36,12 +37,12 @@ def execute( - cmd: List[str], - cwd: Optional[str] = None, - capture_output: Optional[bool] = False, - env: Optional[dict] = None, - raise_errors: Optional[bool] = True -) -> Tuple[int, str]: + cmd: list[str], + cwd: str | None = None, + capture_output: bool | None = False, + env: dict | None = None, + raise_errors: bool | None = True +) -> tuple[int, str]: """ Run an external command (wrapper for Python subprocess). 
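The oslogin sample's `execute()` also drops `Optional[bool]` in favor of `bool | None`, and its `Tuple[int, str]` return becomes `tuple[int, str]`. A stripped-down, hypothetical analogue of that modernized signature (not the sample's implementation):

```python
from __future__ import annotations

import subprocess


def run_command(cmd: list[str]) -> tuple[int, str]:
    # Run a command, returning (exit code, captured stdout).
    result = subprocess.run(cmd, capture_output=True, text=True)
    return result.returncode, result.stdout
```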
@@ -166,11 +167,11 @@ def run_ssh(cmd: str, private_key_file: str, username: str, hostname: str) -> st def main( cmd, project: str, - instance: Optional[str] = None, - zone: Optional[str] = None, - account: Optional[str] = None, - hostname: Optional[str] = None, - oslogin: Optional[oslogin_v1.OsLoginServiceClient] = None + instance: str | None = None, + zone: str | None = None, + account: str | None = None, + hostname: str | None = None, + oslogin: oslogin_v1.OsLoginServiceClient | None = None ) -> None: """Runs a command on a remote system.""" diff --git a/container/snippets/create_cluster.py b/container/snippets/create_cluster.py index 949c6f6e2afa..a8b5ea815571 100644 --- a/container/snippets/create_cluster.py +++ b/container/snippets/create_cluster.py @@ -14,15 +14,16 @@ # limitations under the License. # [START gke_create_cluster] +from __future__ import annotations + import argparse import sys -from typing import Dict import backoff from google.cloud import container_v1 -def on_success(details: Dict[str, str]) -> None: +def on_success(details: dict[str, str]) -> None: """ A handler function to pass into the retry backoff algorithm as the function to be executed upon a successful attempt. @@ -33,7 +34,7 @@ def on_success(details: Dict[str, str]) -> None: print("Successfully created cluster after {elapsed:0.1f} seconds".format(**details)) -def on_failure(details: Dict[str, str]) -> None: +def on_failure(details: dict[str, str]) -> None: """ A handler function to pass into the retry backoff algorithm as the function to be executed upon a failed attempt. diff --git a/container/snippets/delete_cluster.py b/container/snippets/delete_cluster.py index 405de9788060..14c9e4ddd646 100644 --- a/container/snippets/delete_cluster.py +++ b/container/snippets/delete_cluster.py @@ -14,15 +14,16 @@ # limitations under the License. # [START gke_delete_cluster] +from __future__ import annotations + import argparse import sys -from typing import Dict import backoff from google.cloud import container_v1 -def on_success(details: Dict[str, str]) -> None: +def on_success(details: dict[str, str]) -> None: """ A handler function to pass into the retry backoff algorithm as the function to be executed upon a successful attempt. @@ -33,7 +34,7 @@ def on_success(details: Dict[str, str]) -> None: print("Successfully deleted cluster after {elapsed:0.1f} seconds".format(**details)) -def on_failure(details: Dict[str, str]) -> None: +def on_failure(details: dict[str, str]) -> None: """ A handler function to pass into the retry backoff algorithm as the function to be executed upon a failed attempt. 
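The GKE snippets' `on_success`/`on_failure` handlers annotate the backoff `details` mapping as `dict[str, str]`, although the values backoff actually passes are heterogeneous (`tries` is an int and `elapsed` a float, as the `{elapsed:0.1f}` format spec implies), so `dict[str, Any]` would arguably be the more accurate hint. A hypothetical handler in the same style:

```python
from __future__ import annotations

from typing import Any


def on_giveup(details: dict[str, Any]) -> None:
    # backoff passes tries (int), elapsed (float), target, args, and kwargs.
    print("Gave up after {tries} tries and {elapsed:0.1f} seconds".format(**details))


on_giveup({"tries": 3, "elapsed": 42.5, "target": None, "args": (), "kwargs": {}})
```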
diff --git a/contentwarehouse/snippets/set_acl_sample.py b/contentwarehouse/snippets/set_acl_sample.py index 32506f06dc09..8016913fbf7c 100644 --- a/contentwarehouse/snippets/set_acl_sample.py +++ b/contentwarehouse/snippets/set_acl_sample.py @@ -16,7 +16,9 @@ # [START contentwarehouse_set_acl] -from typing import Any, Dict, List +from __future__ import annotations + +from typing import Any from google.cloud import contentwarehouse @@ -31,7 +33,7 @@ def set_acl( project_number: str, location: str, - policy: Dict[str, List[Dict[str, Any]]], + policy: dict[str, list[dict[str, Any]]], user_id: str, document_id: str = '' ) -> None: diff --git a/data-science-onramp/vertex-ai/sklearn_test.py b/data-science-onramp/vertex-ai/sklearn_test.py index 8b0806abe53b..5e82e442b18b 100644 --- a/data-science-onramp/vertex-ai/sklearn_test.py +++ b/data-science-onramp/vertex-ai/sklearn_test.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import os import tarfile import time -from typing import Tuple import uuid from google.cloud import storage @@ -54,7 +55,7 @@ def shared_state() -> dict: @pytest.fixture(autouse=True) def setup_teardown( shared_state: dict, -) -> Tuple[storage.bucket.Bucket, aip.JobServiceClient]: +) -> tuple[storage.bucket.Bucket, aip.JobServiceClient]: storage_client = storage.Client() bucket = storage_client.create_bucket(STAGING_BUCKET, location=REGION) bucket.blob(f"{INPUT_DIR}/{TRAIN_DATA}").upload_from_filename( @@ -84,7 +85,7 @@ def setup_teardown( @pytest.mark.flaky(max_runs=3, min_passes=1) def test_sklearn( - setup_teardown: Tuple[storage.bucket.Bucket, aip.JobServiceClient], + setup_teardown: tuple[storage.bucket.Bucket, aip.JobServiceClient], shared_state: dict, ) -> None: bucket, aip_job_client = setup_teardown diff --git a/data-science-onramp/vertex-ai/tfkeras_test.py b/data-science-onramp/vertex-ai/tfkeras_test.py index 0eddd4f3bc2f..7cc4443488fc 100644 --- a/data-science-onramp/vertex-ai/tfkeras_test.py +++ b/data-science-onramp/vertex-ai/tfkeras_test.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import os import tarfile import time -from typing import Tuple import uuid from google.cloud import storage @@ -54,7 +55,7 @@ def shared_state() -> dict: @pytest.fixture(autouse=True) def setup_teardown( shared_state: dict, -) -> Tuple[storage.bucket.Bucket, aip.JobServiceClient]: +) -> tuple[storage.bucket.Bucket, aip.JobServiceClient]: storage_client = storage.Client() bucket = storage_client.create_bucket(STAGING_BUCKET, location=REGION) @@ -85,7 +86,7 @@ def setup_teardown( @pytest.mark.flaky(max_runs=3, min_passes=1) def test_tfkeras( - setup_teardown: Tuple[storage.bucket.Bucket, aip.JobServiceClient], + setup_teardown: tuple[storage.bucket.Bucket, aip.JobServiceClient], shared_state: dict, ) -> None: bucket, aip_job_client = setup_teardown diff --git a/dataflow/conftest.py b/dataflow/conftest.py index bdb423ecf704..2c3cfbb6bdba 100644 --- a/dataflow/conftest.py +++ b/dataflow/conftest.py @@ -10,6 +10,9 @@ # distributed under the License is distributed on an 'AS IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+from __future__ import annotations + +from collections.abc import Callable, Iterable from dataclasses import dataclass from google.api_core.exceptions import NotFound import itertools @@ -21,7 +24,7 @@ import re import subprocess import time -from typing import Any, Callable, Dict, Iterable, Optional, Set +from typing import Any import uuid import pytest @@ -79,8 +82,7 @@ def storage_bucket(name: str) -> str: yield bucket.name # Print all the objects in the bucket before deleting for debugging. - logging.info( - f"Deleting bucket {bucket.name} with the following contents:") + logging.info(f"Deleting bucket {bucket.name} with the following contents:") total_files = 0 total_size = 0 for blob in bucket.list_blobs(): @@ -123,10 +125,9 @@ def bigquery_table( dataset_name: str, table_name: str, project: str = PROJECT, **kwargs ) -> str: from google.cloud import bigquery + bigquery_client = bigquery.Client() - table = bigquery.Table( - f"{project}.{dataset_name}.{table_name}", **kwargs - ) + table = bigquery.Table(f"{project}.{dataset_name}.{table_name}", **kwargs) result = bigquery_client.create_table(table) logging.info(f"Created bigquery_table: {result.full_table_id}") yield result.table_id @@ -147,7 +148,7 @@ def bigquery_table_exists( return False @staticmethod - def bigquery_query(query: str, region: str = REGION) -> Iterable[Dict[str, Any]]: + def bigquery_query(query: str, region: str = REGION) -> Iterable[dict[str, Any]]: from google.cloud import bigquery bigquery_client = bigquery.Client() @@ -160,8 +161,7 @@ def pubsub_topic(name: str, project: str = PROJECT) -> str: from google.cloud import pubsub publisher_client = pubsub.PublisherClient() - topic_path = publisher_client.topic_path( - project, Utils.hyphen_name(name)) + topic_path = publisher_client.topic_path(project, Utils.hyphen_name(name)) topic = publisher_client.create_topic(request={"name": topic_path}) logging.info(f"Created pubsub_topic: {topic.name}") @@ -171,8 +171,7 @@ def pubsub_topic(name: str, project: str = PROJECT) -> str: # library throws an error upon deletion. # We use gcloud for a workaround. See also: # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4492 - cmd = ["gcloud", "pubsub", "--project", - project, "topics", "delete", topic.name] + cmd = ["gcloud", "pubsub", "--project", project, "topics", "delete", topic.name] logging.info(f"{cmd}") subprocess.check_call(cmd) logging.info(f"Deleted pubsub_topic: {topic.name}") @@ -227,8 +226,7 @@ def _infinite_publish_job() -> None: publisher_client = pubsub.PublisherClient() for i in itertools.count(): msg = new_msg(i) - publisher_client.publish( - topic_path, msg.encode("utf-8")).result() + publisher_client.publish(topic_path, msg.encode("utf-8")).result() time.sleep(sleep_sec) # Start a subprocess in the background to do the publishing. 
@@ -253,10 +251,10 @@ def _infinite_publish_job() -> None: @staticmethod def cloud_build_submit( - image_name: Optional[str] = None, - config: Optional[str] = None, + image_name: str | None = None, + config: str | None = None, source: str = ".", - substitutions: Optional[Dict[str, str]] = None, + substitutions: dict[str, str] | None = None, project: str = PROJECT, ) -> None: """Sends a Cloud Build job, if an image_name is provided it will be deleted at teardown.""" @@ -285,8 +283,7 @@ def cloud_build_submit( ] logging.info(f"{cmd}") subprocess.check_call(cmd) - logging.info( - f"Cloud build finished successfully: {config}") + logging.info(f"Cloud build finished successfully: {config}") yield f.read() except Exception as e: logging.exception(e) @@ -304,8 +301,7 @@ def cloud_build_submit( ] logging.info(f"{cmd}") subprocess.check_call(cmd) - logging.info( - f"Created image: gcr.io/{project}/{image_name}:{UUID}") + logging.info(f"Created image: gcr.io/{project}/{image_name}:{UUID}") yield f"{image_name}:{UUID}" else: raise ValueError("must specify either `config` or `image_name`") @@ -323,8 +319,7 @@ def cloud_build_submit( ] logging.info(f"{cmd}") subprocess.check_call(cmd) - logging.info( - f"Deleted image: gcr.io/{project}/{image_name}:{UUID}") + logging.info(f"Deleted image: gcr.io/{project}/{image_name}:{UUID}") @staticmethod def dataflow_job_url( @@ -370,7 +365,7 @@ def dataflow_job_id( raise ValueError(f"Dataflow job not found: job_name={job_name}") @staticmethod - def dataflow_jobs_get(job_id: str, project: str = PROJECT) -> Dict[str, Any]: + def dataflow_jobs_get(job_id: str, project: str = PROJECT) -> dict[str, Any]: from googleapiclient.discovery import build dataflow = build("dataflow", "v1b3") @@ -394,10 +389,10 @@ def dataflow_jobs_wait( job_id: str, project: str = PROJECT, region: str = REGION, - target_states: Set[str] = {"JOB_STATE_DONE"}, + target_states: set[str] = {"JOB_STATE_DONE"}, timeout_sec: str = TIMEOUT_SEC, poll_interval_sec: int = POLL_INTERVAL_SEC, - ) -> Optional[str]: + ) -> str | None: """For a list of all the valid states: https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState """ @@ -431,8 +426,10 @@ def job_is_done() -> bool: return False Utils.wait_until(job_is_done, timeout_sec, poll_interval_sec) - assert job_is_done(), (f"Dataflow job is not done after {timeout_sec} seconds\n" - + Utils.dataflow_job_url(job_id, project, region)) + assert job_is_done(), ( + f"Dataflow job is not done after {timeout_sec} seconds\n" + + Utils.dataflow_job_url(job_id, project, region) + ) @staticmethod def dataflow_jobs_cancel( @@ -522,7 +519,7 @@ def dataflow_flex_template_run( job_name: str, template_path: str, bucket_name: str, - parameters: Dict[str, str] = {}, + parameters: dict[str, str] = {}, project: str = PROJECT, region: str = REGION, ) -> str: @@ -563,7 +560,7 @@ def dataflow_extensible_template_run( job_name: str, template_path: str, bucket_name: str, - parameters: Dict[str, str] = {}, + parameters: dict[str, str] = {}, project: str = PROJECT, region: str = REGION, ) -> str: diff --git a/dataflow/custom-containers/miniconda/main.py b/dataflow/custom-containers/miniconda/main.py index ea0e54bff519..3badb100e3d8 100644 --- a/dataflow/custom-containers/miniconda/main.py +++ b/dataflow/custom-containers/miniconda/main.py @@ -14,15 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import logging import platform -from typing import List, Optional import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions -def run(beam_args: Optional[List[str]] = None) -> None: +def run(beam_args: list[str] | None = None) -> None: beam_options = PipelineOptions(beam_args, save_main_session=True) pipeline = beam.Pipeline(options=beam_options) ( diff --git a/dataflow/custom-containers/minimal/main.py b/dataflow/custom-containers/minimal/main.py index ea0e54bff519..3badb100e3d8 100644 --- a/dataflow/custom-containers/minimal/main.py +++ b/dataflow/custom-containers/minimal/main.py @@ -14,15 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import logging import platform -from typing import List, Optional import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions -def run(beam_args: Optional[List[str]] = None) -> None: +def run(beam_args: list[str] | None = None) -> None: beam_options = PipelineOptions(beam_args, save_main_session=True) pipeline = beam.Pipeline(options=beam_options) ( diff --git a/dataflow/custom-containers/ubuntu/main.py b/dataflow/custom-containers/ubuntu/main.py index ea0e54bff519..3badb100e3d8 100644 --- a/dataflow/custom-containers/ubuntu/main.py +++ b/dataflow/custom-containers/ubuntu/main.py @@ -14,15 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import logging import platform -from typing import List, Optional import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions -def run(beam_args: Optional[List[str]] = None) -> None: +def run(beam_args: list[str] | None = None) -> None: beam_options = PipelineOptions(beam_args, save_main_session=True) pipeline = beam.Pipeline(options=beam_options) ( diff --git a/dataflow/flex-templates/streaming_beam/streaming_beam.py b/dataflow/flex-templates/streaming_beam/streaming_beam.py index ab2ecfc9087b..4b360f2ba75f 100644 --- a/dataflow/flex-templates/streaming_beam/streaming_beam.py +++ b/dataflow/flex-templates/streaming_beam/streaming_beam.py @@ -20,11 +20,13 @@ writes the results to BigQuery. """ +from __future__ import annotations + import argparse import json import logging import time -from typing import Any, Dict, List +from typing import Any import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions @@ -42,7 +44,7 @@ ) -def parse_json_message(message: str) -> Dict[str, Any]: +def parse_json_message(message: str) -> dict[str, Any]: """Parse the input json message and add 'score' & 'processing_time' keys.""" row = json.loads(message) return { @@ -56,7 +58,7 @@ def run( input_subscription: str, output_table: str, window_interval_sec: int = 60, - beam_args: List[str] = None, + beam_args: list[str] = None, ) -> None: """Build and run the pipeline.""" options = PipelineOptions(beam_args, save_main_session=True, streaming=True) diff --git a/dataflow/gpu-examples/pytorch-minimal/main.py b/dataflow/gpu-examples/pytorch-minimal/main.py index b939b33fa538..d7cf1c92d5d7 100644 --- a/dataflow/gpu-examples/pytorch-minimal/main.py +++ b/dataflow/gpu-examples/pytorch-minimal/main.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import argparse import logging -from typing import List, Optional import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions @@ -31,7 +32,7 @@ def check_gpus(_: None, gpus_optional: bool = False) -> None: raise RuntimeError("No GPUs found.") -def run(input_text: str, beam_args: Optional[List[str]] = None) -> None: +def run(input_text: str, beam_args: list[str] | None = None) -> None: beam_options = PipelineOptions(beam_args, save_main_session=True) pipeline = beam.Pipeline(options=beam_options) ( diff --git a/dataflow/gpu-examples/tensorflow-landsat-prime/main.py b/dataflow/gpu-examples/tensorflow-landsat-prime/main.py index 60a2bd8ec38d..012e364d1bcc 100644 --- a/dataflow/gpu-examples/tensorflow-landsat-prime/main.py +++ b/dataflow/gpu-examples/tensorflow-landsat-prime/main.py @@ -48,11 +48,13 @@ - Create a JPEG image and save it to Cloud Storage. """ +from __future__ import annotations + import argparse import logging import os import re -from typing import Any, Dict, List, Optional, Tuple +from typing import Any import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions @@ -127,12 +129,12 @@ def check_gpus(_: None, gpus_optional: bool = False) -> None: raise RuntimeError("No GPUs found.") -def get_band_paths(scene: str, band_names: List[str]) -> Tuple[str, List[str]]: +def get_band_paths(scene: str, band_names: list[str]) -> tuple[str, list[str]]: """Gets the Cloud Storage paths for each band in a Landsat scene. Args: scene: Landsat 8 scene ID. - band_names: List of the band names corresponding to [Red, Green, Blue] channels. + band_names: list of the band names corresponding to [Red, Green, Blue] channels. Returns: A (scene, band_paths) pair. @@ -175,11 +177,11 @@ def save_to_gcs( def load_as_rgb( scene: str, - band_paths: List[str], + band_paths: list[str], min_value: float = DEFAULT_MIN_BAND_VALUE, max_value: float = DEFAULT_MAX_BAND_VALUE, gamma: float = DEFAULT_GAMMA, -) -> Tuple[str, np.ndarray]: +) -> tuple[str, np.ndarray]: """Loads a scene's bands data and converts it into a pixel-ready format for an RGB image. @@ -231,17 +233,17 @@ def read_band(band_path: str) -> np.ndarray: def run( - scenes: List[str], + scenes: list[str], output_path_prefix: str, - vis_params: Dict[str, Any], + vis_params: dict[str, Any], gpu_type: str = DEFAULT_GPU_TYPE, gpu_count: int = DEFAULT_GPU_COUNT, - beam_args: Optional[List[str]] = None, + beam_args: list[str] | None = None, ) -> None: """Load multiple Landsat scenes and render them as JPEG files. Args: - scenes: List of Landsat 8 scene IDs. + scenes: list of Landsat 8 scene IDs. output_path_prefix: Path prefix to save the output files. vis_params: Visualization parameters including {rgb_bands, min, max, gamma}. beam_args: Optional list of arguments for Beam pipeline options. diff --git a/dataflow/gpu-examples/tensorflow-landsat/main.py b/dataflow/gpu-examples/tensorflow-landsat/main.py index 408268dfa6d7..fbbddab6462c 100644 --- a/dataflow/gpu-examples/tensorflow-landsat/main.py +++ b/dataflow/gpu-examples/tensorflow-landsat/main.py @@ -48,11 +48,13 @@ - Create a JPEG image and save it to Cloud Storage. 
""" +from __future__ import annotations + import argparse import logging import os import re -from typing import Any, Dict, List, Optional, Tuple +from typing import Any import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions @@ -121,12 +123,12 @@ def check_gpus(_: None, gpus_optional: bool = False) -> None: raise RuntimeError("No GPUs found.") -def get_band_paths(scene: str, band_names: List[str]) -> Tuple[str, List[str]]: +def get_band_paths(scene: str, band_names: list[str]) -> tuple[str, list[str]]: """Gets the Cloud Storage paths for each band in a Landsat scene. Args: scene: Landsat 8 scene ID. - band_names: List of the band names corresponding to [Red, Green, Blue] channels. + band_names: list of the band names corresponding to [Red, Green, Blue] channels. Returns: A (scene, band_paths) pair. @@ -151,7 +153,7 @@ def get_band_paths(scene: str, band_names: List[str]) -> Tuple[str, List[str]]: return scene, band_paths -def load_values(scene: str, band_paths: List[str]) -> Tuple[str, np.ndarray]: +def load_values(scene: str, band_paths: list[str]) -> tuple[str, np.ndarray]: """Loads a scene's bands data as a numpy array. Args: @@ -181,7 +183,7 @@ def preprocess_pixels( min_value: float = 0.0, max_value: float = 1.0, gamma: float = 1.0, -) -> Tuple[str, tf.Tensor]: +) -> tuple[str, tf.Tensor]: """Prepares the band data into a pixel-ready format for an RGB image. The input band values come in the shape (band, width, height) with @@ -236,15 +238,15 @@ def save_to_gcs( def run( - scenes: List[str], + scenes: list[str], output_path_prefix: str, - vis_params: Dict[str, Any], - beam_args: Optional[List[str]] = None, + vis_params: dict[str, Any], + beam_args: list[str] | None = None, ) -> None: """Load multiple Landsat scenes and render them as JPEG files. Args: - scenes: List of Landsat 8 scene IDs. + scenes: list of Landsat 8 scene IDs. output_path_prefix: Path prefix to save the output files. vis_params: Visualization parameters including {rgb_bands, min, max, gamma}. beam_args: Optional list of arguments for Beam pipeline options. diff --git a/dataflow/gpu-examples/tensorflow-minimal/main.py b/dataflow/gpu-examples/tensorflow-minimal/main.py index 6732d95392f9..f09aaa08e141 100644 --- a/dataflow/gpu-examples/tensorflow-minimal/main.py +++ b/dataflow/gpu-examples/tensorflow-minimal/main.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import argparse import logging -from typing import List, Optional import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions @@ -32,7 +33,7 @@ def check_gpus(_: None, gpus_optional: bool = False) -> None: raise RuntimeError("No GPUs found.") -def run(input_text: str, beam_args: Optional[List[str]] = None) -> None: +def run(input_text: str, beam_args: list[str] | None = None) -> None: beam_options = PipelineOptions(beam_args, save_main_session=True) pipeline = beam.Pipeline(options=beam_options) ( diff --git a/datastore/cloud-client/tasks.py b/datastore/cloud-client/tasks.py index d0fddd33304a..fb7f2466b730 100644 --- a/datastore/cloud-client/tasks.py +++ b/datastore/cloud-client/tasks.py @@ -11,9 +11,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import argparse import datetime -from typing import Union # [START datastore_add_entity] # [START datastore_build_service] @@ -63,7 +64,7 @@ def add_task(client: datastore.Client, description: str): # [START datastore_update_entity] -def mark_done(client: datastore.Client, task_id: Union[str, int]): +def mark_done(client: datastore.Client, task_id: str | int): with client.transaction(): # Create a key for an entity of kind "Task", and with the supplied # `task_id` as its Id @@ -98,7 +99,7 @@ def list_tasks(client: datastore.Client): # [START datastore_delete_entity] -def delete_task(client: datastore.Client, task_id: Union[str, int]): +def delete_task(client: datastore.Client, task_id: str | int): # Create a key for an entity of kind "Task", and with the supplied # `task_id` as its Id key = client.key("Task", task_id) diff --git a/discoveryengine/import_documents_sample.py b/discoveryengine/import_documents_sample.py index aa152ce83b1e..72d19082b12a 100644 --- a/discoveryengine/import_documents_sample.py +++ b/discoveryengine/import_documents_sample.py @@ -14,7 +14,8 @@ # # [START genappbuilder_import_documents] -from typing import Optional +from __future__ import annotations + from google.cloud import discoveryengine_v1beta as genappbuilder @@ -34,9 +35,9 @@ def import_documents_sample( project_id: str, location: str, search_engine_id: str, - gcs_uri: Optional[str] = None, - bigquery_dataset: Optional[str] = None, - bigquery_table: Optional[str] = None, + gcs_uri: str | None = None, + bigquery_dataset: str | None = None, + bigquery_table: str | None = None, ) -> None: # Create a client client = genappbuilder.DocumentServiceClient() diff --git a/discoveryengine/list_operations_sample.py b/discoveryengine/list_operations_sample.py index 0ae150e368db..f7993cf569fd 100644 --- a/discoveryengine/list_operations_sample.py +++ b/discoveryengine/list_operations_sample.py @@ -13,7 +13,8 @@ # limitations under the License. # [START genappbuilder_list_operations] -from typing import Optional +from __future__ import annotations + from google.cloud import discoveryengine_v1beta as genappbuilder @@ -32,7 +33,7 @@ def list_operations_sample( project_id: str, location: str, search_engine_id: str, - operations_filter: Optional[str] = None, + operations_filter: str | None = None, ) -> None: # Create a client client = genappbuilder.DocumentServiceClient() diff --git a/dlp/snippets/deid.py b/dlp/snippets/deid.py index 86cc58ba34be..bc6edbb7f425 100644 --- a/dlp/snippets/deid.py +++ b/dlp/snippets/deid.py @@ -14,9 +14,9 @@ """Uses of the Data Loss Prevention API for deidentifying sensitive data.""" +from __future__ import annotations import argparse -from typing import List # [START dlp_deidentify_masking] @@ -83,6 +83,7 @@ def deidentify_with_mask( # [END dlp_deidentify_masking] + # [START dlp_deidentify_redact] def deidentify_with_redact( project, @@ -135,6 +136,7 @@ def deidentify_with_redact( # [END dlp_deidentify_redact] + # [START dlp_deidentify_replace] def deidentify_with_replace( project, @@ -293,6 +295,7 @@ def deidentify_with_fpe( # [END dlp_deidentify_fpe] + # [START dlp_deidentify_deterministic] def deidentify_with_deterministic( project, @@ -942,7 +945,7 @@ def deidentify_with_simple_word_list( project: str, input_str: str, custom_info_type_name: str, - word_list: List[str], + word_list: list[str], ) -> None: """Uses the Data Loss Prevention API to de-identify sensitive data in a string by matching against custom word list. 
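
For reference while reviewing the word-list hunks, a hypothetical call to the sample above (the project ID, info type name, and strings are placeholders, not values from this repo):

    deidentify_with_simple_word_list(
        project="my-project",  # placeholder
        input_str="Patient was seen by Dr. Quasimodo",
        custom_info_type_name="CUSTOM_DOCTOR_NAME",
        word_list=["Quasimodo"],
    )
    # Matches from the word list are replaced with the custom info type name,
    # per the replace_with_info_type_config set up below.
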
@@ -965,7 +968,7 @@ def deidentify_with_simple_word_list( custom_info_types = [ { "info_type": {"name": custom_info_type_name}, - "dictionary": {"word_list": word_list} + "dictionary": {"word_list": word_list}, } ] @@ -978,9 +981,7 @@ def deidentify_with_simple_word_list( deidentify_config = { "info_type_transformations": { "transformations": [ - { - "primitive_transformation": {"replace_with_info_type_config": {}} - } + {"primitive_transformation": {"replace_with_info_type_config": {}}} ] } } @@ -1008,12 +1009,7 @@ def deidentify_with_simple_word_list( # [START dlp_deidentify_exception_list] -def deidentify_with_exception_list( - project, - content_string, - info_types, - exception_list -): +def deidentify_with_exception_list(project, content_string, info_types, exception_list): """Uses the Data Loss Prevention API to de-identify sensitive data in a string but ignore matches against custom list. @@ -1100,7 +1096,7 @@ def deidentify_table_bucketing( deid_content_list, bucket_size, bucketing_lower_bound, - bucketing_upper_bound + bucketing_upper_bound, ): """Uses the Data Loss Prevention API to de-identify sensitive data in a table by replacing them with fixed size bucket ranges. @@ -1176,7 +1172,7 @@ def deidentify_table_bucketing( fixed_size_bucketing_config = { "bucket_size": bucket_size, "lower_bound": {"integer_value": bucketing_lower_bound}, - "upper_bound": {"integer_value": bucketing_upper_bound} + "upper_bound": {"integer_value": bucketing_upper_bound}, } # Specify fields to be de-identified @@ -1190,18 +1186,16 @@ def deidentify_table_bucketing( "fields": deid_content_list, "primitive_transformation": { "fixed_size_bucketing_config": fixed_size_bucketing_config - } + }, } ] } } # Call the API. - response = dlp.deidentify_content(request={ - "parent": parent, - "deidentify_config": deidentify_config, - "item": item - }) + response = dlp.deidentify_content( + request={"parent": parent, "deidentify_config": deidentify_config, "item": item} + ) # Print the results. print(f"Table after de-identification: {response.item.table}") @@ -1209,6 +1203,7 @@ def deidentify_table_bucketing( # Return the response. return response.item.table + # [END dlp_deidentify_table_bucketing] @@ -1220,7 +1215,7 @@ def deidentify_table_condition_replace_with_info_types( info_types, condition_field=None, condition_operator=None, - condition_value=None + condition_value=None, ): """Uses the Data Loss Prevention API to de-identify sensitive data in a table by replacing them with info-types based on a condition. 
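
The deid_table_bucketing hunks above drive DLP's fixed_size_bucketing_config; as a plain-Python sketch of what the service does to each value (the bounds and the label format here are illustrative, the real formatting happens server-side):

    def bucket_label(value: int, bucket_size: int = 10,
                     lower_bound: int = 20, upper_bound: int = 70) -> str:
        """Illustrative: map a numeric value to its fixed-size bucket range."""
        if value < lower_bound:
            return f"<{lower_bound}"
        if value >= upper_bound:
            return f">={upper_bound}"
        low = lower_bound + (value - lower_bound) // bucket_size * bucket_size
        return f"{low}-{low + bucket_size}"

    assert bucket_label(27) == "20-30"  # e.g. an AGE column value
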
@@ -1301,7 +1296,7 @@ def deidentify_table_condition_replace_with_info_types( { "field": {"name": condition_field}, "operator": condition_operator, - "value": {"integer_value": condition_value} + "value": {"integer_value": condition_value}, } ] @@ -1313,16 +1308,16 @@ def deidentify_table_condition_replace_with_info_types( "info_type_transformations": { "transformations": [ { - "primitive_transformation": {"replace_with_info_type_config": {}} + "primitive_transformation": { + "replace_with_info_type_config": {} + } } ] }, "fields": deid_field_list, "condition": { - "expressions": { - "conditions": {"conditions": condition} - } - } + "expressions": {"conditions": {"conditions": condition}} + }, } ] } @@ -1337,13 +1332,15 @@ def deidentify_table_condition_replace_with_info_types( "parent": parent, "deidentify_config": deidentify_config, "item": item, - "inspect_config": inspect_config - }) + "inspect_config": inspect_config, + } + ) print(f"Table after de-identification: {response.item.table}") return response.item.table + # [END dlp_deidentify_table_condition_infotypes] @@ -1355,7 +1352,7 @@ def deidentify_table_condition_masking( condition_field=None, condition_operator=None, condition_value=None, - masking_character=None + masking_character=None, ): """ Uses the Data Loss Prevention API to de-identify sensitive data in a table by masking them based on a condition. @@ -1435,7 +1432,7 @@ def deidentify_table_condition_masking( { "field": {"name": condition_field}, "operator": condition_operator, - "value": {"integer_value": condition_value} + "value": {"integer_value": condition_value}, } ] @@ -1451,10 +1448,8 @@ def deidentify_table_condition_masking( }, "fields": deid_content_list, "condition": { - "expressions": { - "conditions": {"conditions": condition} - } - } + "expressions": {"conditions": {"conditions": condition}} + }, } ] } @@ -1465,11 +1460,8 @@ def deidentify_table_condition_masking( # Call the API. response = dlp.deidentify_content( - request={ - "parent": parent, - "deidentify_config": deidentify_config, - "item": item - }) + request={"parent": parent, "deidentify_config": deidentify_config, "item": item} + ) # Print the result print(f"Table after de-identification: {response.item.table}") @@ -1477,15 +1469,13 @@ def deidentify_table_condition_masking( # Return the response return response.item.table + # [END dlp_deidentify_table_condition_masking] # [START dlp_deidentify_table_infotypes] def deidentify_table_replace_with_info_types( - project, - table_data, - info_types, - deid_content_list + project, table_data, info_types, deid_content_list ): """ Uses the Data Loss Prevention API to de-identify sensitive data in a table by replacing them with info type. 
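
Both conditional samples above assemble the same RecordCondition structure before passing it to deidentify_content; spelled out with illustrative values (the field name, operator, and value come from the CLI flags):

    condition = [
        {
            "field": {"name": "AGE"},        # --condition_field
            "operator": "GREATER_THAN",      # --condition_operator
            "value": {"integer_value": 89},  # --condition_value
        }
    ]
    # Nested exactly as in the samples above:
    record_condition = {"expressions": {"conditions": {"conditions": condition}}}
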
@@ -1551,7 +1541,9 @@ def deidentify_table_replace_with_info_types( "info_type_transformations": { "transformations": [ { - "primitive_transformation": {"replace_with_info_type_config": {}} + "primitive_transformation": { + "replace_with_info_type_config": {} + } } ] }, @@ -1570,8 +1562,9 @@ def deidentify_table_replace_with_info_types( "parent": parent, "deidentify_config": deidentify_config, "item": item, - "inspect_config": inspect_config - }) + "inspect_config": inspect_config, + } + ) # Print the result print(f"Table after de-identification: {response.item.table}") @@ -1817,7 +1810,7 @@ def deidentify_table_replace_with_info_types( deid_word_list_parser = subparsers.add_parser( "deid_simple_word_list", - help="Deidentify sensitive data in a string against a custom simple word list" + help="Deidentify sensitive data in a string against a custom simple word list", ) deid_word_list_parser.add_argument( "project", @@ -1838,7 +1831,7 @@ def deidentify_table_replace_with_info_types( deid_exception_list_parser = subparsers.add_parser( "deid_exception_list", - help="De-identify sensitive data in a string , ignore matches against a custom word list" + help="De-identify sensitive data in a string , ignore matches against a custom word list", ) deid_exception_list_parser.add_argument( "project", @@ -1853,7 +1846,7 @@ def deidentify_table_replace_with_info_types( nargs="+", help="Strings representing info types to look for. A full list of " "info categories and types is available from the API. Examples " - 'include "FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS". ' + 'include "FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS". ', ) deid_exception_list_parser.add_argument( "exception_list", @@ -1863,7 +1856,7 @@ def deidentify_table_replace_with_info_types( table_bucketing_parser = subparsers.add_parser( "deid_table_bucketing", help="De-identify sensitive data in a table by replacing " - "them with fixed size bucket ranges.", + "them with fixed size bucket ranges.", ) table_bucketing_parser.add_argument( "--project", @@ -1874,8 +1867,7 @@ def deidentify_table_replace_with_info_types( help="Json string representing table data", ) table_bucketing_parser.add_argument( - "--deid_content_list", - help="A list of fields in table to de-identify." + "--deid_content_list", help="A list of fields in table to de-identify." ) table_bucketing_parser.add_argument( "--bucket_size", @@ -1904,8 +1896,7 @@ def deidentify_table_replace_with_info_types( help="Json string representing table data", ) table_condition_replace_parser.add_argument( - "deid_content_list", - help="A list of fields in table to de-identify." + "deid_content_list", help="A list of fields in table to de-identify." ) table_condition_replace_parser.add_argument( "--info_types", @@ -1916,8 +1907,7 @@ def deidentify_table_replace_with_info_types( ) table_condition_replace_parser.add_argument( "--condition_field", - help="A table Field within the record this condition is evaluated " - "against.", + help="A table Field within the record this condition is evaluated " "against.", ) table_condition_replace_parser.add_argument( "--condition_operator", @@ -1945,13 +1935,11 @@ def deidentify_table_replace_with_info_types( help="Json string representing table data", ) table_condition_mask_parser.add_argument( - "deid_content_list", - help="A list of fields in table to de-identify." + "deid_content_list", help="A list of fields in table to de-identify." 
) table_condition_mask_parser.add_argument( "--condition_field", - help="A table Field within the record this condition is evaluated " - "against.", + help="A table Field within the record this condition is evaluated " "against.", ) table_condition_mask_parser.add_argument( "--condition_operator", @@ -2080,7 +2068,7 @@ def deidentify_table_replace_with_info_types( args.info_types, condition_field=args.condition_field, condition_operator=args.condition_operator, - condition_value=args.condition_value + condition_value=args.condition_value, ) elif args.content == "deid_table_condition_mask": deidentify_table_condition_masking( @@ -2090,7 +2078,7 @@ def deidentify_table_replace_with_info_types( condition_field=args.condition_field, condition_operator=args.condition_operator, condition_value=args.condition_value, - masking_character=args.masking_character + masking_character=args.masking_character, ) elif args.content == "table_replace_with_infotype": deidentify_table_replace_with_info_types( diff --git a/dlp/snippets/jobs.py b/dlp/snippets/jobs.py index bfb9cf7e7c63..9d8b4c0e7dcd 100644 --- a/dlp/snippets/jobs.py +++ b/dlp/snippets/jobs.py @@ -14,9 +14,9 @@ """Sample app to list and delete DLP jobs using the Data Loss Prevent API. """ +from __future__ import annotations import argparse -from typing import List # [START dlp_list_jobs] @@ -119,7 +119,7 @@ def delete_dlp_job(project, job_name): def create_dlp_job( project: str, bucket: str, - info_types: List[str], + info_types: list[str], job_id: str = None, max_findings: int = 100, auto_populate_timespan: bool = True, @@ -175,16 +175,13 @@ def create_dlp_job( # Call the API. response = dlp.create_dlp_job( - request={ - "parent": parent, - "inspect_job": job, - "job_id": job_id - } + request={"parent": parent, "inspect_job": job, "job_id": job_id} ) # Print out the result. print(f"Job : {response.name} status: {response.state}") + # [END dlp_create_job] @@ -234,18 +231,18 @@ def create_dlp_job( create_parser.add_argument( "bucket", help="The name of the GCS bucket to scan. This sample scans all files " - "in the bucket." + "in the bucket.", ) create_parser.add_argument( "--info_types", nargs="+", help="Strings representing info types to look for. A full list of " - "info categories and types is available from the API. Examples " - 'include "FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS". ' + "info categories and types is available from the API. Examples " + 'include "FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS". ', ) create_parser.add_argument( "--job_id", - help="The id of the job. If omitted, an id will be randomly generated." + help="The id of the job. 
If omitted, an id will be randomly generated.", ) create_parser.add_argument( "--max_findings", diff --git a/documentai/snippets/process_document_form_sample.py b/documentai/snippets/process_document_form_sample.py index f3b323bdd292..fe362ac955ae 100644 --- a/documentai/snippets/process_document_form_sample.py +++ b/documentai/snippets/process_document_form_sample.py @@ -15,7 +15,9 @@ # [START documentai_process_form_document] -from typing import Sequence +from __future__ import annotations + +from collections.abc import Sequence from google.api_core.client_options import ClientOptions from google.cloud import documentai diff --git a/documentai/snippets/process_document_ocr_sample.py b/documentai/snippets/process_document_ocr_sample.py index a8fb6605e09a..f34d0a3b756b 100644 --- a/documentai/snippets/process_document_ocr_sample.py +++ b/documentai/snippets/process_document_ocr_sample.py @@ -15,7 +15,9 @@ # [START documentai_process_ocr_document] -from typing import Sequence +from __future__ import annotations + +from collections.abc import Sequence from google.api_core.client_options import ClientOptions from google.cloud import documentai diff --git a/documentai/snippets/process_document_splitter_sample.py b/documentai/snippets/process_document_splitter_sample.py index 99a7e54e3ca4..5a6e9b0045f1 100644 --- a/documentai/snippets/process_document_splitter_sample.py +++ b/documentai/snippets/process_document_splitter_sample.py @@ -15,7 +15,9 @@ # [START documentai_process_splitter_document] -from typing import Sequence +from __future__ import annotations + +from collections.abc import Sequence from google.api_core.client_options import ClientOptions from google.cloud import documentai diff --git a/enterpriseknowledgegraph/search/lookup_public_kg_sample.py b/enterpriseknowledgegraph/search/lookup_public_kg_sample.py index 09903332a3ea..32b92438839e 100644 --- a/enterpriseknowledgegraph/search/lookup_public_kg_sample.py +++ b/enterpriseknowledgegraph/search/lookup_public_kg_sample.py @@ -15,7 +15,9 @@ # [START enterpriseknowledgegraph_lookup_public_kg] -from typing import Sequence +from __future__ import annotations + +from collections.abc import Sequence from google.cloud import enterpriseknowledgegraph as ekg diff --git a/enterpriseknowledgegraph/search/lookup_sample.py b/enterpriseknowledgegraph/search/lookup_sample.py index 5ba77ee51351..a57b52da612b 100644 --- a/enterpriseknowledgegraph/search/lookup_sample.py +++ b/enterpriseknowledgegraph/search/lookup_sample.py @@ -15,7 +15,9 @@ # [START enterpriseknowledgegraph_lookup] -from typing import Sequence +from __future__ import annotations + +from collections.abc import Sequence from google.cloud import enterpriseknowledgegraph as ekg diff --git a/enterpriseknowledgegraph/search/search_public_kg_sample.py b/enterpriseknowledgegraph/search/search_public_kg_sample.py index 5e940296cbba..412347f099fc 100644 --- a/enterpriseknowledgegraph/search/search_public_kg_sample.py +++ b/enterpriseknowledgegraph/search/search_public_kg_sample.py @@ -15,7 +15,9 @@ # [START enterpriseknowledgegraph_search_public_kg] -from typing import Sequence +from __future__ import annotations + +from collections.abc import Sequence from google.cloud import enterpriseknowledgegraph as ekg diff --git a/enterpriseknowledgegraph/search/search_sample.py b/enterpriseknowledgegraph/search/search_sample.py index 7fdf30bb9862..8c6f8dc49ea7 100644 --- a/enterpriseknowledgegraph/search/search_sample.py +++ b/enterpriseknowledgegraph/search/search_sample.py @@ -15,7 +15,9 @@ # 
[START enterpriseknowledgegraph_search] -from typing import Sequence +from __future__ import annotations + +from collections.abc import Sequence from google.cloud import enterpriseknowledgegraph as ekg diff --git a/generative_ai/tuning.py b/generative_ai/tuning.py index 8a4b506a0c44..46c66937a897 100644 --- a/generative_ai/tuning.py +++ b/generative_ai/tuning.py @@ -13,7 +13,8 @@ # limitations under the License. # [START aiplatform_sdk_tuning] -from typing import Union +from __future__ import annotations + import pandas as pd @@ -24,7 +25,7 @@ def tuning( project_id: str, location: str, - training_data: Union[pd.DataFrame, str], + training_data: pd.DataFrame | str, train_steps: int = 10, ): """Tune a new model, based on a prompt-response data. diff --git a/logging/redaction/log_redaction.py b/logging/redaction/log_redaction.py index 0b79e0940d94..efd26ebdbd5f 100644 --- a/logging/redaction/log_redaction.py +++ b/logging/redaction/log_redaction.py @@ -13,10 +13,11 @@ # limitations under the License. +from __future__ import annotations + import argparse import json import logging -from typing import List from apache_beam import CombineFn, CombineGlobally, DoFn, io, ParDo, Pipeline, WindowInto from apache_beam.error import PipelineError @@ -96,7 +97,7 @@ def run( pubsub_subscription: str, destination_log_name: str, window_size: float, - pipeline_args: List[str] = None + pipeline_args: list[str] = None ) -> None: '''Runs Dataflow pipeline''' diff --git a/logging/redaction/log_redaction_final.py b/logging/redaction/log_redaction_final.py index f837609ceea3..58d35d390d70 100644 --- a/logging/redaction/log_redaction_final.py +++ b/logging/redaction/log_redaction_final.py @@ -13,10 +13,11 @@ # limitations under the License. +from __future__ import annotations + import argparse import json import logging -from typing import List from apache_beam import CombineFn, CombineGlobally, DoFn, io, ParDo, Pipeline, WindowInto from apache_beam.error import PipelineError @@ -169,7 +170,7 @@ def run( pubsub_subscription: str, destination_log_name: str, window_size: float, - pipeline_args: List[str] = None + pipeline_args: list[str] = None ) -> None: '''Runs Dataflow pipeline''' diff --git a/noxfile-template.py b/noxfile-template.py index 397b6644dd3d..a848c68792be 100644 --- a/noxfile-template.py +++ b/noxfile-template.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations from __future__ import print_function +from collections.abc import Callable import glob import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional import nox @@ -71,7 +72,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars() -> Dict[str, str]: +def get_pytest_env_vars() -> dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -105,7 +106,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: +def _determine_local_import_names(start_dir: str) -> list[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -257,7 +258,7 @@ def py(session: nox.sessions.Session) -> None: # -def _get_repo_root() -> Optional[str]: +def _get_repo_root() -> str | None: """Returns the root folder of the project.""" # Get root of this repository. # Assume we don't have directories nested deeper than 10 items. 
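
One caveat that applies to noxfile-template.py and every other file in this change: the `__future__` import only defers evaluation. Anything that introspects annotations at runtime still needs an interpreter where the new syntax is real, as this illustrative snippet shows:

    from __future__ import annotations
    import typing

    def f(x: str | None = None) -> None:
        ...

    # Fine on Python 3.10+; raises TypeError on 3.9 because the stored
    # string "str | None" gets evaluated, and type objects only support
    # the | operator from 3.10 onwards.
    typing.get_type_hints(f)
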
diff --git a/people-and-planet-ai/geospatial-classification/e2e_test.py b/people-and-planet-ai/geospatial-classification/e2e_test.py index 5d5e641df291..0a53144d7f6a 100644 --- a/people-and-planet-ai/geospatial-classification/e2e_test.py +++ b/people-and-planet-ai/geospatial-classification/e2e_test.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from datetime import datetime, timedelta import logging import os diff --git a/people-and-planet-ai/geospatial-classification/task.py b/people-and-planet-ai/geospatial-classification/task.py index 2dbc4806cafb..63c3ad585613 100644 --- a/people-and-planet-ai/geospatial-classification/task.py +++ b/people-and-planet-ai/geospatial-classification/task.py @@ -27,8 +27,9 @@ https://developers.google.com/earth-engine/datasets/catalog/COPERNICUS_S2 """ +from __future__ import annotations + import argparse -from typing import Tuple import tensorflow as tf @@ -80,7 +81,7 @@ def create_features_dict() -> dict: def get_feature_and_label_vectors( inputs: dict, features_dict: dict -) -> Tuple[tf.Tensor, int]: +) -> tuple[tf.Tensor, int]: """Formats data.""" label_value = tf.cast(inputs.pop(LABEL), tf.int32) @@ -90,7 +91,7 @@ def get_feature_and_label_vectors( return features_vec, label_value -def create_datasets(bucket: str) -> Tuple[tf.data.Dataset, tf.data.Dataset]: +def create_datasets(bucket: str) -> tuple[tf.data.Dataset, tf.data.Dataset]: """Creates training and validation datasets.""" train_data_dir = f"gs://{bucket}/geospatial_training.tfrecord.gz" diff --git a/people-and-planet-ai/image-classification/create_images_metadata_table.py b/people-and-planet-ai/image-classification/create_images_metadata_table.py index 462ec136539f..8651e8c9b6dc 100644 --- a/people-and-planet-ai/image-classification/create_images_metadata_table.py +++ b/people-and-planet-ai/image-classification/create_images_metadata_table.py @@ -14,11 +14,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from collections.abc import Iterable import io import json import logging import os -from typing import Dict, Iterable, Optional import zipfile import apache_beam as beam @@ -43,7 +45,7 @@ def run( bigquery_dataset: str, bigquery_table: str, - pipeline_options: Optional[PipelineOptions] = None, + pipeline_options: PipelineOptions | None = None, ) -> None: """Creates the images metadata table in BigQuery. @@ -92,7 +94,7 @@ def run( ) -def get_images_metadata(metadata_url: str) -> Iterable[Dict[str, str]]: +def get_images_metadata(metadata_url: str) -> Iterable[dict[str, str]]: """Returns an iterable of {'category', 'file_name'} dicts. """ content = requests.get(metadata_url).content with zipfile.ZipFile(io.BytesIO(content)) as zf: diff --git a/people-and-planet-ai/image-classification/predict.py b/people-and-planet-ai/image-classification/predict.py index 9b50d0594097..86cad8f9049c 100644 --- a/people-and-planet-ai/image-classification/predict.py +++ b/people-and-planet-ai/image-classification/predict.py @@ -14,8 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import base64 -from typing import List, Tuple from google.cloud import aiplatform from google.cloud.aiplatform.gapic.schema import predict @@ -26,7 +27,7 @@ def run( project: str, region: str, model_endpoint_id: str, image_file: str -) -> List[Tuple[str, float]]: +) -> list[tuple[str, float]]: """Sends an image from the LILA WCS database for prediction. Args: diff --git a/people-and-planet-ai/image-classification/train_model.py b/people-and-planet-ai/image-classification/train_model.py index bd959d3d80d4..cfef1cde6444 100644 --- a/people-and-planet-ai/image-classification/train_model.py +++ b/people-and-planet-ai/image-classification/train_model.py @@ -14,12 +14,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from collections.abc import Callable, Iterable from datetime import datetime import io import logging import random import time -from typing import Callable, Dict, Iterable, Optional, Tuple, TypeVar +from typing import TypeVar import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions @@ -41,7 +44,7 @@ def run( min_images_per_class: int, max_images_per_class: int, budget_milli_node_hours: int, - pipeline_options: Optional[PipelineOptions] = None, + pipeline_options: PipelineOptions | None = None, ) -> None: """Creates a balanced dataset and signals AI Platform to train a model. @@ -104,8 +107,8 @@ def run( def get_image( - image_info: Dict[str, str], cloud_storage_path: str -) -> Iterable[Tuple[str, str]]: + image_info: dict[str, str], cloud_storage_path: str +) -> Iterable[tuple[str, str]]: """Makes sure an image exists in Cloud Storage. Checks if the image file_name exists in Cloud Storage. @@ -113,7 +116,7 @@ def get_image( If the image can't be downloaded, it is skipped. Args: - image_info: Dict of {'category', 'file_name'}. + image_info: dict of {'category', 'file_name'}. cloud_storage_path: Cloud Storage path to look for and download images. Returns: @@ -144,7 +147,7 @@ def get_image( def write_dataset_csv_file( - dataset_csv_filename: str, images: Iterable[Tuple[str, str]] + dataset_csv_filename: str, images: Iterable[tuple[str, str]] ) -> str: """Writes the dataset image file names and categories in a CSV file. @@ -172,7 +175,7 @@ def write_dataset_csv_file( def create_dataset( dataset_csv_filename: str, project: str, region: str, dataset_name: str -) -> Tuple[str, str]: +) -> tuple[str, str]: """Creates an dataset for AI Platform. For more information: diff --git a/people-and-planet-ai/timeseries-classification/create_datasets.py b/people-and-planet-ai/timeseries-classification/create_datasets.py index ca04c436cfee..7659a58d8907 100644 --- a/people-and-planet-ai/timeseries-classification/create_datasets.py +++ b/people-and-planet-ai/timeseries-classification/create_datasets.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import logging import random -from typing import List import apache_beam as beam from apache_beam.options.pipeline_options import PipelineOptions @@ -30,8 +31,8 @@ def run( raw_labels_dir: str, train_data_dir: str, eval_data_dir: str, - train_eval_split: List[int], - beam_args: List[str], + train_eval_split: list[int], + beam_args: list[str], ) -> str: labels = pd.concat( [ diff --git a/people-and-planet-ai/timeseries-classification/data_utils.py b/people-and-planet-ai/timeseries-classification/data_utils.py index 2cf0b68533e6..ceea7960120c 100644 --- a/people-and-planet-ai/timeseries-classification/data_utils.py +++ b/people-and-planet-ai/timeseries-classification/data_utils.py @@ -13,10 +13,12 @@ # limitations under the License. +from __future__ import annotations + +from collections.abc import Iterable from datetime import datetime, timedelta import os import time -from typing import Dict, Iterable import numpy as np import pandas as pd @@ -34,7 +36,7 @@ def to_unix_time(timestamp: datetime) -> int: return time.mktime(timestamp.timetuple()) -def with_fixed_time_steps(input_data: Dict[str, np.ndarray]) -> pd.DataFrame: +def with_fixed_time_steps(input_data: dict[str, np.ndarray]) -> pd.DataFrame: return ( pd.DataFrame(input_data) .assign(timestamp=lambda df: df["timestamp"].map(datetime.utcfromtimestamp)) @@ -86,7 +88,7 @@ def label_data(data: pd.DataFrame, labels: pd.DataFrame) -> pd.DataFrame: ) -def generate_training_points(data: pd.DataFrame) -> Iterable[Dict[str, np.ndarray]]: +def generate_training_points(data: pd.DataFrame) -> Iterable[dict[str, np.ndarray]]: # Pandas assigns NaN (Not-a-Number) if a value is missing. # If is_fishing equals itself it means it's populated because (NaN != NaN). # For the training data points, we only get points where we have a label. diff --git a/people-and-planet-ai/timeseries-classification/predict.py b/people-and-planet-ai/timeseries-classification/predict.py index 8c430275b1b4..91d55e097754 100644 --- a/people-and-planet-ai/timeseries-classification/predict.py +++ b/people-and-planet-ai/timeseries-classification/predict.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Optional +from __future__ import annotations + import numpy as np import pandas as pd @@ -21,10 +22,10 @@ import data_utils import trainer -model: Optional[keras.Model] = None +model: keras.Model | None = None -def predict(model: keras.Model, inputs: Dict[str, np.ndarray]) -> pd.DataFrame: +def predict(model: keras.Model, inputs: dict[str, np.ndarray]) -> pd.DataFrame: data = data_utils.with_fixed_time_steps(inputs) # Our model always expects a batch prediction, so we create a batch with @@ -39,7 +40,7 @@ def predict(model: keras.Model, inputs: Dict[str, np.ndarray]) -> pd.DataFrame: return data[trainer.PADDING:].assign(is_fishing=predictions["is_fishing"][0]) -def run(model_dir: str, inputs: Dict[str, List[float]]) -> Dict[str, np.ndarray]: +def run(model_dir: str, inputs: dict[str, list[float]]) -> dict[str, np.ndarray]: # Cache the model so it only has to be loaded once per runtime. 
global model if model is None: diff --git a/people-and-planet-ai/timeseries-classification/trainer.py b/people-and-planet-ai/timeseries-classification/trainer.py index 255283f09032..68e16ceb5786 100644 --- a/people-and-planet-ai/timeseries-classification/trainer.py +++ b/people-and-planet-ai/timeseries-classification/trainer.py @@ -12,10 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from functools import reduce import logging import os -from typing import Dict, Tuple, TypeVar +from typing import TypeVar import tensorflow as tf from tensorflow import keras @@ -39,9 +41,9 @@ def validated( - tensor_dict: Dict[str, tf.Tensor], - spec_dict: Dict[str, tf.TypeSpec], -) -> Dict[str, tf.Tensor]: + tensor_dict: dict[str, tf.Tensor], + spec_dict: dict[str, tf.TypeSpec], +) -> dict[str, tf.Tensor]: for field, spec in spec_dict.items(): if field not in tensor_dict: raise KeyError( @@ -58,7 +60,7 @@ def validated( return tensor_dict -def serialize(value_dict: Dict[str, a]) -> bytes: +def serialize(value_dict: dict[str, a]) -> bytes: spec_dict = {**INPUTS_SPEC, **OUTPUTS_SPEC} tensor_dict = { field: tf.convert_to_tensor(value, spec_dict[field].dtype) @@ -83,7 +85,7 @@ def serialize(value_dict: Dict[str, a]) -> bytes: def deserialize( serialized_example: bytes, -) -> Tuple[Dict[str, tf.Tensor], Dict[str, tf.Tensor]]: +) -> tuple[dict[str, tf.Tensor], dict[str, tf.Tensor]]: features = { field: tf.io.FixedLenFeature(shape=(), dtype=tf.string) for field in [*INPUTS_SPEC.keys(), *OUTPUTS_SPEC.keys()] @@ -95,7 +97,7 @@ def parse_tensor(bytes_value: bytes, spec: tf.TypeSpec) -> tf.Tensor: tensor.set_shape(spec.shape) return tensor - def parse_features(spec_dict: Dict[str, tf.TypeSpec]) -> Dict[str, tf.Tensor]: + def parse_features(spec_dict: dict[str, tf.TypeSpec]) -> dict[str, tf.Tensor]: tensor_dict = { field: parse_tensor(bytes_value, spec_dict[field]) for field, bytes_value in example.items() @@ -142,7 +144,7 @@ def geo_point(lat_name: str, lon_name: str) -> keras.layers.Layer: # We transform each (lat, lon) pair into a 3D point in the unit sphere. # https://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates class GeoPoint(keras.layers.Layer): - def call(self: a, latlon: Tuple[tf.Tensor, tf.Tensor]) -> tf.Tensor: + def call(self: a, latlon: tuple[tf.Tensor, tf.Tensor]) -> tf.Tensor: lat, lon = latlon x = tf.cos(lon) * tf.sin(lat) y = tf.sin(lon) * tf.sin(lat) diff --git a/people-and-planet-ai/weather-forecasting/create_dataset.py b/people-and-planet-ai/weather-forecasting/create_dataset.py index 99307e233ffd..9c80290830f2 100644 --- a/people-and-planet-ai/weather-forecasting/create_dataset.py +++ b/people-and-planet-ai/weather-forecasting/create_dataset.py @@ -20,7 +20,6 @@ from datetime import datetime, timedelta import logging import random -from typing import List, Optional import uuid import apache_beam as beam @@ -148,7 +147,7 @@ def run( num_bins: int = NUM_BINS, max_requests: int = MAX_REQUESTS, min_batch_size: int = MIN_BATCH_SIZE, - beam_args: Optional[List[str]] = None, + beam_args: list[str] | None = None, ) -> None: """Runs an Apache Beam pipeline to create a dataset. 
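
As background for the trainer.py and create_datasets.py hunks above: on interpreters new enough to evaluate them, the lower-case generics are ordinary runtime objects, which is why no typing import is left behind. An illustrative check:

    import types

    alias = dict[str, list[int]]  # evaluating this requires Python 3.9+
    print(type(alias) is types.GenericAlias)  # True
    print(alias.__origin__)                   # <class 'dict'>
    print(alias.__args__)                     # (<class 'str'>, list[int])
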
diff --git a/people-and-planet-ai/weather-forecasting/serving/weather-model/weather/model.py b/people-and-planet-ai/weather-forecasting/serving/weather-model/weather/model.py index 3eb274dbb3b9..66c29a50af59 100644 --- a/people-and-planet-ai/weather-forecasting/serving/weather-model/weather/model.py +++ b/people-and-planet-ai/weather-forecasting/serving/weather-model/weather/model.py @@ -16,7 +16,7 @@ from __future__ import annotations -from typing import Any as AnyType, Optional +from typing import Any as AnyType from datasets.arrow_dataset import Dataset import numpy as np @@ -83,7 +83,7 @@ def __init__(self, config: WeatherConfig) -> None: ) def forward( - self, inputs: torch.Tensor, labels: Optional[torch.Tensor] = None + self, inputs: torch.Tensor, labels: torch.Tensor | None = None ) -> dict[str, torch.Tensor]: """Computes predictions as expected by ModelOutputs. diff --git a/pubsublite/spark-connector/spark_streaming_test.py b/pubsublite/spark-connector/spark_streaming_test.py index b962f2de4315..93f4d5874e45 100644 --- a/pubsublite/spark-connector/spark_streaming_test.py +++ b/pubsublite/spark-connector/spark_streaming_test.py @@ -12,11 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations +from collections.abc import Generator import os import pathlib import re -from typing import Generator import uuid from google.api_core.exceptions import NotFound diff --git a/recaptcha_enterprise/demosite/app/urls.py b/recaptcha_enterprise/demosite/app/urls.py index 7cf89c933d09..efa8915d87d0 100644 --- a/recaptcha_enterprise/demosite/app/urls.py +++ b/recaptcha_enterprise/demosite/app/urls.py @@ -11,11 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import configparser import enum import json import os -from typing import Tuple from flask import jsonify, render_template, request, Response from google.cloud.recaptchaenterprise_v1 import Assessment @@ -288,7 +289,7 @@ def on_comment_submit() -> Response: # Classify the action as BAD/ NOT_BAD based on conditions specified. -def check_for_bad_action(assessment_response: Assessment, recaptcha_action: str) -> Tuple[str, str]: +def check_for_bad_action(assessment_response: Assessment, recaptcha_action: str) -> tuple[str, str]: reason = "" label = Label.NOT_BAD.value diff --git a/recaptcha_enterprise/snippets/test_create_assessment.py b/recaptcha_enterprise/snippets/test_create_assessment.py index 8225c545247a..a27a9b037dc9 100644 --- a/recaptcha_enterprise/snippets/test_create_assessment.py +++ b/recaptcha_enterprise/snippets/test_create_assessment.py @@ -18,7 +18,6 @@ import os import re import time -import typing from _pytest.capture import CaptureFixture from flask import Flask, render_template, url_for @@ -131,7 +130,7 @@ def test_mfa_assessment( assert re.search("Result unspecified. 
Trigger MFA challenge in the client by passing the request token.", out) -def get_token(recaptcha_site_key: str, browser: WebDriver) -> typing.Tuple: +def get_token(recaptcha_site_key: str, browser: WebDriver) -> tuple: browser.get(url_for("assess", site_key=recaptcha_site_key, _external=True)) time.sleep(5) diff --git a/run/deployment-previews/check_status.py b/run/deployment-previews/check_status.py index 5be143844eed..1b2972c2e3c6 100644 --- a/run/deployment-previews/check_status.py +++ b/run/deployment-previews/check_status.py @@ -14,10 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from collections.abc import Callable import os import re import sys -from typing import Callable import click import github diff --git a/run/deployment-previews/test_app.py b/run/deployment-previews/test_app.py index 2ffb3417954b..6438471dcfe2 100644 --- a/run/deployment-previews/test_app.py +++ b/run/deployment-previews/test_app.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import os from typing import NoReturn, TypeVar from unittest.mock import MagicMock, patch diff --git a/run/idp-sql/database.py b/run/idp-sql/database.py index 36106398ccc0..fc55c6b97427 100644 --- a/run/idp-sql/database.py +++ b/run/idp-sql/database.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import datetime import os from typing import Any diff --git a/run/idp-sql/middleware.py b/run/idp-sql/middleware.py index b308faccfab6..d763c760a2de 100644 --- a/run/idp-sql/middleware.py +++ b/run/idp-sql/middleware.py @@ -13,8 +13,11 @@ # limitations under the License. +from __future__ import annotations + +from collections.abc import Callable from functools import wraps -from typing import Callable, TypeVar +from typing import TypeVar import firebase_admin from firebase_admin import auth # noqa: F401 diff --git a/scripts/convert-types.py b/scripts/convert-types.py new file mode 100644 index 000000000000..0bbd7220b451 --- /dev/null +++ b/scripts/convert-types.py @@ -0,0 +1,322 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Script to convert type hints to follow PEP-0585 + +For more information, see https://peps.python.org/pep-0585 + +To run from the repository's root directory: + python convert-types.py +""" + +from __future__ import annotations + +from collections.abc import Callable, Iterator +import difflib +from glob import glob +import logging +import re +import sys +from typing import NamedTuple, TypeVar + +# TODO: +# - False positives with lambdas and dict comprehensions due to the `:` misinterpreted as a type hint +# - Sort imports case insensitive (e.g. 
PIL, Flask) +# - Type hint arguments can be lists like `Callable[[a, b], c]` + +# Cases not covered: +# - Multi-line imports like `from M import (\nA,\nB,\n)` +# - Importing `typing` directly like `import typing` and `x: typing.Any` +# - Parsing types with `|` syntax like Union or Optional +# - typing.re.Match --> re.Match +# - typing.re.Pattern --> re.Pattern + + +BUILTIN_TYPES = {"Tuple", "List", "Dict", "Set", "FrozenSet", "Type"} +COLLECTIONS_TYPES = {"Deque", "DefaultDict", "OrderedDict", "Counter", "ChainMap"} +COLLECTIONS_ABC_TYPES = { + "Awaitable", + "Coroutine", + "AsyncIterable", + "AsyncIterator", + "AsyncGenerator", + "Iterable", + "Iterator", + "Generator", + "Reversible", + "Container", + "Collection", + "Callable", + "AbstractSet", + "MutableSet", + "Mapping", + "MutableMapping", + "Sequence", + "MutableSequence", + "ByteString", + "MappingView", + "KeysView", + "ItemsView", + "ValuesView", +} +CONTEXTLIB_TYPES = {"ContextManager", "AsyncContextManager"} +RE_TYPES = {"Match", "Pattern"} + +RENAME_TYPES = { + "Tuple": "tuple", # builtin + "List": "list", # builtin + "Dict": "dict", # builtin + "Set": "set", # builtin + "FrozenSet": "frozenset", # builtin + "Type": "type", # builtin + "Deque": "deque", # collections + "DefaultDict": "defaultdict", # collections + "AbstractSet": "Set", # collections.abc + "ContextManager": "AbstractContextManager", # contextlib + "AsyncContextManager": "AbstractAsyncContextManager", # contextlib +} + +# Parser a = String -> (a, String) +a = TypeVar("a") +Parser = Callable[[str], tuple[a, str]] + + +class TypeHint(NamedTuple): + name: str + args: list[TypeHint] + + def __repr__(self) -> str: + match (self.name, self.args): + case ("Optional", [x]): + return f"{x} | None" + case ("Union", args): + return " | ".join(map(str, args)) + case (name, []): + return name + case (name, args): + return f"{name}[{', '.join(map(str, args))}]" + + def patch(self, types: set[str]) -> TypeHint: + if self.name in types: + name = RENAME_TYPES.get(self.name, self.name) + else: + name = self.name + return TypeHint(name, [arg.patch(types) for arg in self.args]) + + +def patch_file(file_path: str, dry_run: bool = False, quiet: bool = False) -> None: + with open(file_path) as f: + before = f.read() + try: + lines = [line.rstrip() for line in before.splitlines()] + if types := find_typing_imports(lines): + lines = insert_import_annotations(lines) + lines = [patched for line in lines for patched in patch_imports(line)] + lines = sort_imports(lines) + after = patch_type_hints("\n".join(lines), types) + "\n" + if before == after: + return + + if not dry_run: + with open(file_path, "w") as f: + f.write(after) + print(file_path) + elif not quiet: + print(f"| {file_path}") + print(f"+--{'-' * len(file_path)}") + diffs = difflib.context_diff( + before.splitlines(keepends=True), + after.splitlines(keepends=True), + fromfile="Before changes", + tofile="After changes", + n=1, + ) + sys.stdout.writelines(diffs) + print(f"+{'=' * 100}") + print("| Press [ENTER] to continue to the next file") + input() + except Exception: + logging.exception(f"Could not process file: {file_path}") + + +def insert_import_annotations(lines: list[str]) -> list[str]: + new_import = "from __future__ import annotations" + if new_import in lines: + return lines + + match find_import(lines): + case None: + return lines + case i: + if lines[i].startswith("from __future__ import "): + return lines[:i] + [new_import] + lines[i:] + return lines[:i] + [new_import, ""] + lines[i:] + + +def 
+def find_typing_imports(lines: list[str]) -> set[str]:
+    return {
+        name.strip()
+        for line in lines
+        if line.startswith("from typing import ")
+        for name in line.split("import")[1].split(",")
+    }
+
+
+def find_import(lines: list[str]) -> int | None:
+    for i, line in enumerate(lines):
+        if line.startswith(("import ", "from ")):
+            return i
+    return None
+
+
+def get_imports_group(lines: list[str]) -> tuple[list[str], list[str]]:
+    for i, line in enumerate(lines):
+        if not line.strip() or line.startswith("#"):
+            return (lines[:i], lines[i:])
+    return ([], lines)
+
+
+def import_name(line: str) -> str:
+    match line.split():
+        case ["import", name, *_]:
+            return name
+        case ["from", name, "import", *_]:
+            return name
+    raise ValueError(f"not an import: {line}")
+
+
+def sort_imports(lines: list[str]) -> list[str]:
+    match find_import(lines):
+        case None:
+            return lines
+        case i:
+            (imports, left) = get_imports_group(lines[i:])
+            if imports:
+                return lines[:i] + sorted(imports, key=import_name) + sort_imports(left)
+            return lines[:i] + left
+
+
+def patch_imports(line: str) -> Iterator[str]:
+    if not line.startswith("from typing import "):
+        yield line
+        return
+
+    types = find_typing_imports([line])
+    collections_types = types.intersection(COLLECTIONS_TYPES)
+    collections_abc_types = types.intersection(COLLECTIONS_ABC_TYPES)
+    contextlib_types = types.intersection(CONTEXTLIB_TYPES)
+    re_types = types.intersection(RE_TYPES)
+    typing_types = (
+        types
+        - BUILTIN_TYPES
+        - COLLECTIONS_TYPES
+        - COLLECTIONS_ABC_TYPES
+        - CONTEXTLIB_TYPES
+        - RE_TYPES
+        - {"Optional", "Union"}
+    )
+
+    rename = lambda name: RENAME_TYPES.get(name, name)
+    if collections_types:
+        names = sorted(map(rename, collections_types))
+        yield f"from collections import {', '.join(names)}"
+    if collections_abc_types:
+        names = sorted(map(rename, collections_abc_types))
+        yield f"from collections.abc import {', '.join(names)}"
+    if contextlib_types:
+        names = sorted(map(rename, contextlib_types))
+        yield f"from contextlib import {', '.join(names)}"
+    if re_types:
+        names = sorted(map(rename, re_types))
+        yield f"from re import {', '.join(names)}"
+    if typing_types:
+        names = sorted(map(rename, typing_types))
+        yield f"from typing import {', '.join(names)}"
+
+
+def patch_type_hints(txt: str, types: set[str]) -> str:
+    if m := re.search(r"(?:->|:) *(\w+)", txt):
+        (typ, left) = parse_type_hint(txt[m.start(1) :])
+        return f"{txt[:m.start(1)]}{typ.patch(types)}{patch_type_hints(left, types)}"
+    return txt
+
+
+# Parser combinators
+def parse_text(src: str, txt: str) -> tuple[str, str]:
+    if src.startswith(txt):
+        return (src[: len(txt)], src[len(txt) :])
+    raise SyntaxError(f"expected {txt!r}")
+
+
+def parse_identifier(src: str) -> tuple[str, str]:
+    # Use re.match so the identifier must start at the beginning of `src`;
+    # re.search could silently skip ahead and desynchronize the parser.
+    if m := re.match(r"[\w.]+", src):
+        return (m.group(), src[m.end() :])
+    raise SyntaxError("expected identifier")
+
+
+def parse_zero_or_more(src: str, parser: Parser[a]) -> tuple[list[a], str]:
+    try:
+        (x, src) = parser(src)
+        (xs, src) = parse_zero_or_more(src, parser)
+        return ([x] + xs, src)
+    except SyntaxError:
+        return ([], src)
+
+
+def parse_comma_separated(src: str, parser: Parser[a]) -> tuple[list[a], str]:
+    def parse_next(src: str) -> tuple[a, str]:
+        (_, src) = parse_text(src, ",")
+        (_, src) = parse_zero_or_more(src, lambda src: parse_text(src, " "))
+        return parser(src)
+
+    try:
+        (x, src) = parser(src)
+        (xs, src) = parse_zero_or_more(src, parse_next)
+        return ([x] + xs, src)
+    except SyntaxError:
+        return ([], src)
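+
+
+# A sketch of the expected behavior (illustrative, not executed):
+#   parse_type_hint("Dict[str, List[int]] = {}")
+#   returns (TypeHint("Dict", ...), " = {}"); patching the result with
+#   types={"Dict", "List"} makes the hint print as "dict[str, list[int]]".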
+def parse_type_hint(src: str) -> tuple[TypeHint, str]:
+    (name, src) = parse_identifier(src)
+    try:
+        (_, src) = parse_text(src, "[")
+        (args, src) = parse_comma_separated(src, parse_type_hint)
+        (_, src) = parse_text(src, "]")
+        return (TypeHint(name, args), src)
+    except SyntaxError:
+        return (TypeHint(name, []), src)
+
+
+def run(patterns: list[str], dry_run: bool = False, quiet: bool = False) -> None:
+    for pattern in patterns:
+        for filename in glob(pattern, recursive=True):
+            patch_file(filename, dry_run, quiet)
+
+
+if __name__ == "__main__":
+    import argparse
+
+    assert sys.version_info >= (3, 10), "Requires Python >= 3.10 for structural pattern matching"
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("patterns", nargs="*", default=["**/*.py"])
+    parser.add_argument("--dry-run", action="store_true")
+    parser.add_argument("--quiet", action="store_true")
+    args = parser.parse_args()
+
+    run(**vars(args))
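+
+# Example invocations (a sketch; the glob pattern is illustrative):
+#   python scripts/convert-types.py --dry-run           # interactively preview diffs
+#   python scripts/convert-types.py 'compute/**/*.py'   # rewrite matching files in place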